diff --git a/.github/actions/appinspect_publish/Dockerfile b/.github/actions/appinspect_publish/Dockerfile
new file mode 100644
index 0000000..ded4720
--- /dev/null
+++ b/.github/actions/appinspect_publish/Dockerfile
@@ -0,0 +1,14 @@
+# Container image that runs your code
+FROM python:3-slim AS builder
+
+# Copies your action code to the filesystem path `/app` of the builder container
+ADD . /app
+WORKDIR /app
+
+RUN pip install --target=/app requests
+
+# Code file to execute when the docker container starts up (`publish.py`)
+FROM gcr.io/distroless/python3-debian10
+COPY --from=builder /app /app
+ENV PYTHONPATH /app
+CMD ["/app/publish.py"]
\ No newline at end of file
diff --git a/.github/actions/appinspect_publish/README.md b/.github/actions/appinspect_publish/README.md
new file mode 100644
index 0000000..99a7635
--- /dev/null
+++ b/.github/actions/appinspect_publish/README.md
@@ -0,0 +1,21 @@
+# AppInspect Publish action
+
+This action publishes a packaged Splunk app release to Splunkbase.
+
+## Inputs
+
+See `action.yml` for the full list: `APP_ID`, `SPLUNK_USERNAME`,
+`SPLUNK_PASSWORD`, `APP_FILE`, `SPLUNK_VERSION`, `VISIBILITY`,
+and `CIM_VERSIONS`.
+
+## Outputs
+
+None.
+
+## Example usage
+
+uses: ./.github/actions/appinspect_publish
+with:
+  APP_ID: '5596'
+  APP_FILE: './dist/my_app.tar.gz'
+  SPLUNK_VERSION: '8.0,8.1,8.2'
\ No newline at end of file
diff --git a/.github/actions/appinspect_publish/action.yml b/.github/actions/appinspect_publish/action.yml
new file mode 100644
index 0000000..ea0b2f4
--- /dev/null
+++ b/.github/actions/appinspect_publish/action.yml
@@ -0,0 +1,30 @@
+name: 'AppInspect Publish'
+description: 'Publish a packaged Splunk app release to Splunkbase'
+inputs:
+ APP_ID:
+ description: 'App ID From Splunkbase'
+ required: true
+ default: '5596'
+ SPLUNK_USERNAME:
+ description: 'Splunkbase Username'
+ required: true
+ SPLUNK_PASSWORD:
+ description: 'Splunkbase Password'
+ required: true
+ APP_FILE:
+ description: 'The name of the file, for example "my_package.tar.gz".'
+ required: true
+ SPLUNK_VERSION:
+ description: 'The Splunk version(s) that the release is compatible with. For example, "8.0,8.1,8.2".'
+ required: true
+ VISIBILITY:
+ description: 'true = The release is to be visible upon package validation success. false = if the release is to be hidden.'
+ required: false
+ default: 'false'
+ CIM_VERSIONS:
+ description: 'The CIM version(s) that the release is compatible with. For example, "4.9,4.7".'
+ required: false
+ default: ''
+runs:
+ using: 'docker'
+ image: 'Dockerfile'
diff --git a/.github/actions/appinspect_publish/publish.py b/.github/actions/appinspect_publish/publish.py
new file mode 100644
index 0000000..8b8adcb
--- /dev/null
+++ b/.github/actions/appinspect_publish/publish.py
@@ -0,0 +1,35 @@
+import os
+import requests
+from requests.auth import HTTPBasicAuth
+
+APP_ID= os.environ['INPUT_APP_ID']
+filepath = os.environ['INPUT_APP_FILE']
+SPLUNK_USERNAME = os.environ['INPUT_SPLUNK_USERNAME']
+SPLUNK_PASSWORD = os.environ['INPUT_SPLUNK_PASSWORD']
+SPLUNK_VERSION = os.environ['INPUT_SPLUNK_VERSION']
+VISIBILITY = os.environ['INPUT_VISIBILITY']
+CIM_VERSIONS = os.environ['INPUT_CIM_VERSIONS']
+
+api_path = 'https://splunkbase.splunk.com/api/v1/app/{}/new_release'.format(APP_ID)
+
+auth = HTTPBasicAuth(SPLUNK_USERNAME, SPLUNK_PASSWORD)
+
+files = {
+ 'files[]': open(filepath, 'rb'),
+ 'filename': (None, os.path.basename(filepath)),
+ 'splunk_versions': (None, SPLUNK_VERSION),
+ 'visibility': (None, VISIBILITY),
+ 'cim_versions': (None, CIM_VERSIONS)
+}
+
+response = requests.post(api_path, files=files, auth=auth)
+
+print(response.status_code)
+print(response.text)
+
+# if the status code is not 200, raise an HTTPError and exit non-zero
+if response.status_code != 200:
+ response.raise_for_status()
+ exit(response.status_code)
+else:
+ exit(0)
diff --git a/.github/actions/log_to_splunk/Dockerfile b/.github/actions/log_to_splunk/Dockerfile
new file mode 100644
index 0000000..543e192
--- /dev/null
+++ b/.github/actions/log_to_splunk/Dockerfile
@@ -0,0 +1,14 @@
+# Container image that runs your code
+FROM python:3-slim AS builder
+
+# Copies your action code to the filesystem path `/app` of the builder container
+ADD . /app
+WORKDIR /app
+
+RUN pip install --target=/app requests
+
+# Code file to execute when the docker container starts up (`main.py`)
+FROM gcr.io/distroless/python3-debian10
+COPY --from=builder /app /app
+ENV PYTHONPATH /app
+CMD ["/app/main.py"]
diff --git a/.github/actions/log_to_splunk/LICENSE b/.github/actions/log_to_splunk/LICENSE
new file mode 100644
index 0000000..4fc208e
--- /dev/null
+++ b/.github/actions/log_to_splunk/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 Splunk GitHub
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/.github/actions/log_to_splunk/README.md b/.github/actions/log_to_splunk/README.md
new file mode 100644
index 0000000..3b5e9fb
--- /dev/null
+++ b/.github/actions/log_to_splunk/README.md
@@ -0,0 +1,2 @@
+# POST GitHub Workflow Logs to Splunk HTTP Event Collector
+test
diff --git a/.github/actions/log_to_splunk/action.yml b/.github/actions/log_to_splunk/action.yml
new file mode 100644
index 0000000..67b803d
--- /dev/null
+++ b/.github/actions/log_to_splunk/action.yml
@@ -0,0 +1,28 @@
+# action.yml
+name: 'Send Workflow Logs to Splunk'
+description: 'Upload GitHub Workflow logs to Splunk HEC'
+inputs:
+ splunk-url:
+ description: 'Full URL for Splunk HEC endpoint'
+ required: true
+ hec-token:
+ description: 'Splunk HEC Token'
+ required: true
+ github-token:
+ description: 'Github PAT'
+ required: true
+ sourcetype:
+ description: 'Splunk Sourcetype'
+ default: 'github_workflow_log_action'
+ source:
+ description: 'GitHub Workflow name'
+ default: ${{ github.workflow }}
+ workflowID:
+ description: 'The Workflow Run ID (used as run_id in GitHub API calls)'
+ default: ${{ github.run_id }}
+outputs:
+ status:
+ description: 'value is success/fail based on POST result'
+runs:
+ using: 'docker'
+ image: 'Dockerfile'
diff --git a/.github/actions/log_to_splunk/main.py b/.github/actions/log_to_splunk/main.py
new file mode 100644
index 0000000..8c127a6
--- /dev/null
+++ b/.github/actions/log_to_splunk/main.py
@@ -0,0 +1,146 @@
+import os
+import requests
+import json
+import zipfile
+import io
+import glob
+import re
+from datetime import datetime
+
+def main():
+
+ GITHUB_REF=os.environ["GITHUB_REF"]
+ GITHUB_REPOSITORY=os.environ["GITHUB_REPOSITORY"]
+ GITHUB_RUN_ID=os.environ["GITHUB_RUN_ID"]
+ GITHUB_API_URL=os.environ["GITHUB_API_URL"]
+ GITHUB_WORKFLOWID=os.environ["INPUT_WORKFLOWID"]
+ GITHUB_TOKEN = os.environ.get("INPUT_GITHUB-TOKEN")
+
+ SPLUNK_HEC_URL=os.environ["INPUT_SPLUNK-URL"]+"services/collector/event"
+ SPLUNK_HEC_TOKEN=os.environ["INPUT_HEC-TOKEN"]
+ SPLUNK_SOURCE=os.environ["INPUT_SOURCE"]
+ SPLUNK_SOURCETYPE=os.environ["INPUT_SOURCETYPE"]
+
+ batch = count = 0
+ eventBatch = ""
+ headers = {"Authorization": "Splunk "+SPLUNK_HEC_TOKEN}
+ host=os.uname()[1]
+
+ summary_url = "{url}/repos/{repo}/actions/runs/{run_id}".format(url=GITHUB_API_URL,repo=GITHUB_REPOSITORY,run_id=GITHUB_WORKFLOWID)
+
+ try:
+ x = requests.get(summary_url, stream=True, auth=('token',GITHUB_TOKEN))
+ x.raise_for_status()
+ except requests.exceptions.HTTPError as errh:
+ output = "GITHUB API Http Error:" + str(errh)
+ print(f"Error: {output}")
+ print(f"::set-output name=result::{output}")
+ return x.status_code
+ except requests.exceptions.ConnectionError as errc:
+ output = "GITHUB API Error Connecting:" + str(errc)
+ print(f"Error: {output}")
+ print(f"::set-output name=result::{output}")
+ return x.status_code
+ except requests.exceptions.Timeout as errt:
+ output = "Timeout Error:" + str(errt)
+ print(f"Error: {output}")
+ print(f"::set-output name=result::{output}")
+ return x.status_code
+ except requests.exceptions.RequestException as err:
+ output = "GITHUB API Non catched error conecting:" + str(err)
+ print(f"Error: {output}")
+ print(f"::set-output name=result::{output}")
+ return x.status_code
+ except Exception as e:
+ print("Internal error", e)
+ return x.status_code
+
+ summary = x.json()
+
+ summary.pop('repository')
+
+ summary["repository"]=summary["head_repository"]["name"]
+ summary["repository_full"]=summary["head_repository"]["full_name"]
+
+ summary.pop('head_repository')
+
+ utc_time = datetime.strptime(summary["updated_at"], "%Y-%m-%dT%H:%M:%SZ")
+ epoch_time = (utc_time - datetime(1970, 1, 1)).total_seconds()
+
+ event={'event':json.dumps(summary),'sourcetype':SPLUNK_SOURCETYPE,'source':'workflow_summary','host':host,'time':epoch_time}
+ event=json.dumps(event)
+
+ x=requests.post(SPLUNK_HEC_URL, data=event, headers=headers)
+
+
+ url = "{url}/repos/{repo}/actions/runs/{run_id}/logs".format(url=GITHUB_API_URL,repo=GITHUB_REPOSITORY,run_id=GITHUB_WORKFLOWID)
+ print(url)
+
+ try:
+ x = requests.get(url, stream=True, auth=('token',GITHUB_TOKEN))
+
+ except requests.exceptions.HTTPError as errh:
+ output = "GITHUB API Http Error:" + str(errh)
+ print(f"Error: {output}")
+ print(f"::set-output name=result::{output}")
+ return
+ except requests.exceptions.ConnectionError as errc:
+ output = "GITHUB API Error Connecting:" + str(errc)
+ print(f"Error: {output}")
+ print(f"::set-output name=result::{output}")
+ return
+ except requests.exceptions.Timeout as errt:
+ output = "Timeout Error:" + str(errt)
+ print(f"Error: {output}")
+ print(f"::set-output name=result::{output}")
+ return
+ except requests.exceptions.RequestException as err:
+ output = "GITHUB API Non catched error conecting:" + str(err)
+ print(f"Error: {output}")
+ print(f"::set-output name=result::{output}")
+ return
+
+ z = zipfile.ZipFile(io.BytesIO(x.content))
+ z.extractall('/app')
+
+ timestamp = batch = count = 0
+
+ for name in glob.glob('/app/*.txt'):
+ logfile = open(os.path.join(os.path.dirname(os.path.abspath(__file__)), name.replace('./','')),'r')
+ Lines = logfile.readlines()
+ for line in Lines:
+
+ if line:
+ count+=1
+ if timestamp:
+ t2=timestamp
+ timestamp = re.search("\d{4}-\d{2}-\d{2}T\d+:\d+:\d+.\d+Z",line.strip())
+
+ if timestamp:
+ timestamp = re.sub("\dZ","",timestamp.group())
+ timestamp = datetime.strptime(timestamp,"%Y-%m-%dT%H:%M:%S.%f")
+ timestamp = (timestamp - datetime(1970,1,1)).total_seconds()
+ else:
+ timestamp=t2
+
+ x = re.sub("\d{4}-\d{2}-\d{2}T\d+:\d+:\d+.\d+Z","",line.strip())
+ x=x.strip()
+ job_name=re.search("\/\d+\_(?P<job>.*)\.txt",name)
+ job_name=job_name.group('job')
+ fields = {'lineNumber':count,'workflowID':GITHUB_WORKFLOWID,'job':job_name}
+ if x:
+ batch+=1
+ event={'event':x,'sourcetype':SPLUNK_SOURCETYPE,'source':SPLUNK_SOURCE,'host':host,'time':timestamp,'fields':fields}
+ eventBatch=eventBatch+json.dumps(event)
+ else:
+ print("skipped line "+str(count))
+
+ if batch>=1000:
+ batch=0
+ x=requests.post(SPLUNK_HEC_URL, data=eventBatch, headers=headers)
+ eventBatch=""
+
+ x=requests.post(SPLUNK_HEC_URL, data=eventBatch, headers=headers)
+
+if __name__ == '__main__':
+ main()
diff --git a/.github/actions/post_logs_to_splunk_hec/Dockerfile b/.github/actions/post_logs_to_splunk_hec/Dockerfile
new file mode 100644
index 0000000..63b5160
--- /dev/null
+++ b/.github/actions/post_logs_to_splunk_hec/Dockerfile
@@ -0,0 +1,8 @@
+# Container image that runs your code
+FROM python:3.8-slim-buster
+
+# Copies your code file from your action repository to the filesystem path `/` of the container
+COPY entrypoint.sh /entrypoint.sh
+
+# Code file to execute when the docker container starts up (`entrypoint.sh`)
+ENTRYPOINT ["/entrypoint.sh"]
diff --git a/.github/actions/post_logs_to_splunk_hec/README.md b/.github/actions/post_logs_to_splunk_hec/README.md
new file mode 100644
index 0000000..3b5e9fb
--- /dev/null
+++ b/.github/actions/post_logs_to_splunk_hec/README.md
@@ -0,0 +1,2 @@
+# POST GitHub Workflow Logs to Splunk HTTP Event Collector
+test
diff --git a/.github/actions/post_logs_to_splunk_hec/action.yml b/.github/actions/post_logs_to_splunk_hec/action.yml
new file mode 100644
index 0000000..131ce24
--- /dev/null
+++ b/.github/actions/post_logs_to_splunk_hec/action.yml
@@ -0,0 +1,31 @@
+# action.yml
+name: 'Post Logs to Splunk HEC'
+description: 'Upload GitHub Workflow logs to Splunk HEC'
+inputs:
+ splunk-url:
+ description: 'Full URL for Splunk HEC endpoint'
+ required: true
+ hec-token:
+ description: 'Splunk HEC Token'
+ required: true
+ sourcetype:
+ description: 'Splunk Sourcetype'
+ default: 'github_workflow_log_job'
+ source:
+ description: 'GitHub Workflow name'
+ default: ${{ github.workflow }}
+ workflowID:
+ description: 'The Workflow Run number'
+ default: ${{ github.run_id}}
+outputs:
+ status:
+ description: 'value is success/fail based on POST result'
+runs:
+ using: 'docker'
+ image: 'Dockerfile'
+ args:
+ - ${{ inputs.splunk-url }}
+ - ${{ inputs.hec-token }}
+ - ${{ inputs.sourcetype }}
+ - ${{ inputs.source }}
+ - ${{ inputs.workflowID }}
diff --git a/.github/actions/post_logs_to_splunk_hec/entrypoint.sh b/.github/actions/post_logs_to_splunk_hec/entrypoint.sh
new file mode 100755
index 0000000..e631a1e
--- /dev/null
+++ b/.github/actions/post_logs_to_splunk_hec/entrypoint.sh
@@ -0,0 +1,46 @@
+#!/bin/sh -l
+
+python3 -m pip install requests
+echo "
+import os
+import requests
+import re
+from datetime import datetime
+import json
+
+logfile = open(os.path.join(os.path.dirname(os.path.abspath(__file__)), \"file.log\"),'r')
+Lines = logfile.readlines()
+
+batch = count = 0
+url = \"$1services/collector/event\"
+token=\"$2\"
+headers = {\"Authorization\": \"Splunk \"+token}
+sourcetype = \"$3\"
+eventBatch = \"\"
+workflowID=\"$5\"
+source=\"$4\"
+host=\"$HOSTNAME\"
+
+for line in Lines:
+ count+=1
+ timestamp = re.search(\"\d{4}-\d{2}-\d{2}T\d+:\d+:\d+.\d+Z\",line.strip())
+ timestamp = re.sub(\"\dZ\",\"\",timestamp.group())
+ timestamp = datetime.strptime(timestamp,\"%Y-%m-%dT%H:%M:%S.%f\")
+ timestamp = (timestamp - datetime(1970,1,1)).total_seconds()
+ x = re.sub(\"\d{4}-\d{2}-\d{2}T\d+:\d+:\d+.\d+Z\",\"\",line.strip())
+ x=x.strip()
+ fields = {'lineNumber':count,'workflowID':workflowID}
+ if x:
+ batch+=1
+ event={'event':x,'sourcetype':sourcetype,'source':source,'host':host,'time':timestamp,'fields':fields}
+ eventBatch=eventBatch+json.dumps(event)
+ else:
+ print(\"skipped line \"+str(count))
+
+ if batch>=1000:
+ batch=0
+ x=requests.post(url, data=eventBatch, headers=headers)
+ eventBatch=\"\"
+
+x=requests.post(url, data=eventBatch, headers=headers)" > t.py
+python3 t.py
diff --git a/.github/workflows/appinspect_api.yml b/.github/workflows/appinspect_api.yml
index 99a88cd..2053040 100644
--- a/.github/workflows/appinspect_api.yml
+++ b/.github/workflows/appinspect_api.yml
@@ -45,3 +45,17 @@ jobs:
splunkUser: ${{ secrets.SPLUNKBASE_USER }}
splunkPassword: ${{ secrets.SPLUNKBASE_PASSWORD }}
includedTags: cloud
+ - name: Release
+ uses: fnkr/github-action-ghr@v1
+ if: startsWith(github.ref, 'refs/tags/')
+ env:
+ GHR_PATH: ./dist/github_app_for_splunk.spl
+ GITHUB_TOKEN: ${{ secrets.API_TOKEN }}
+ - name: Publish App to Splunkbase
+ uses: ./.github/actions/appinspect_publish # Uses an action in the root directory
+ with:
+ APP_ID: '5596'
+ APP_FILE: './dist/github_app_for_splunk.spl'
+ SPLUNK_USERNAME: ${{ secrets.SPLUNKBASE_USER }}
+ SPLUNK_PASSWORD: ${{ secrets.SPLUNKBASE_PASSWORD }}
+ SPLUNK_VERSION: '8.0,8.1,8.2,9.0'
diff --git a/.github/workflows/appinspect_cli.yml b/.github/workflows/appinspect_cli.yml
index 06517ad..4ed053d 100644
--- a/.github/workflows/appinspect_cli.yml
+++ b/.github/workflows/appinspect_cli.yml
@@ -20,6 +20,14 @@ jobs:
steps:
- uses: actions/checkout@v2
+ - name: Set up Python 3.x
+ uses: actions/setup-python@v4
+ with:
+ # Semantic version range syntax or exact version of a Python version
+ python-version: '3.9'
+ # Optional - x64 or x86 architecture, defaults to x64
+ architecture: 'x64'
+
- name: Install deps
uses: CultureHQ/actions-yarn@master
with:
@@ -35,10 +43,6 @@ jobs:
with:
args: build
- - name: Update Permissions
- run: |
- chmod +x ./.github/actions/appinspect_cli/entrypoint.sh
-
- name: Update Version Number
run: |
old_str="X.Y.Z"
@@ -46,7 +50,23 @@ jobs:
sed -i "s/$old_str/$new_str/g" package.json
sed -i "s/$old_str/$new_str/g" ./github_app_for_splunk/default/app.conf
+ - name: Install slim
+ run: |
+ pip install https://download.splunk.com/misc/packaging-toolkit/splunk-packaging-toolkit-1.0.1.tar.gz
+
+ - name: Create package
+ run: |
+ mkdir build
+ slim package ./github_app_for_splunk
+
- name: Run App Inspect CLI
- uses: ./.github/actions/appinspect_cli
+ uses: splunk/appinspect-cli-action@v1.5
+ with:
+ app_path: github_app_for_splunk-1.0.0.tar.gz
+ included_tags: cloud, splunk_appinspect
+
+ - name: Upload package
+ uses: actions/upload-artifact@v3
with:
- app-path: github_app_for_splunk/
+ name: github_app_for_splunk-1.0.0.tar.gz
+ path: ./github_app_for_splunk-1.0.0.tar.gz
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
new file mode 100644
index 0000000..6078826
--- /dev/null
+++ b/.github/workflows/codeql-analysis.yml
@@ -0,0 +1,71 @@
+# For most projects, this workflow file will not need changing; you simply need
+# to commit it to your repository.
+#
+# You may wish to alter this file to override the set of languages analyzed,
+# or to provide custom queries or build logic.
+#
+# ******** NOTE ********
+# We have attempted to detect the languages in your repository. Please check
+# the `language` matrix defined below to confirm you have the correct set of
+# supported CodeQL languages.
+#
+name: "CodeQL"
+
+on:
+ push:
+ branches: [ main ]
+ pull_request:
+ # The branches below must be a subset of the branches above
+ branches: [ main ]
+ schedule:
+ - cron: '19 20 * * 2'
+
+jobs:
+ analyze:
+ name: Analyze
+ runs-on: ubuntu-latest
+ permissions:
+ actions: read
+ contents: read
+ security-events: write
+
+ strategy:
+ fail-fast: false
+ matrix:
+ language: [ 'javascript' ]
+ # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
+ # Learn more:
+ # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v2
+
+ # Initializes the CodeQL tools for scanning.
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v1
+ with:
+ languages: ${{ matrix.language }}
+ # If you wish to specify custom queries, you can do so here or in a config file.
+ # By default, queries listed here will override any specified in a config file.
+ # Prefix the list here with "+" to use these queries and those in the config file.
+ # queries: ./path/to/local/query, your-org/your-repo/queries@main
+
+ # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
+ # If this step fails, then you should remove it and run the build manually (see below)
+ - name: Autobuild
+ uses: github/codeql-action/autobuild@v1
+
+ # âšī¸ Command-line programs to run using the OS shell.
+ # đ https://git.io/JvXDl
+
+ # âī¸ If the Autobuild fails above, remove it and uncomment the following three lines
+ # and modify them (or add more) to build your code if your project
+ # uses a compiled language
+
+ #- run: |
+ # make bootstrap
+ # make release
+
+ - name: Perform CodeQL Analysis
+ uses: github/codeql-action/analyze@v1
diff --git a/.github/workflows/log_to_splunk.yml b/.github/workflows/log_to_splunk.yml
new file mode 100644
index 0000000..1a708f1
--- /dev/null
+++ b/.github/workflows/log_to_splunk.yml
@@ -0,0 +1,33 @@
+name: Send Workflow Logs to Splunk
+
+# Controls when the action will run.
+on:
+ workflow_run:
+ workflows: ["*"]
+ types:
+ - completed
+
+env:
+ triggerID: ${{ github.event.workflow_run.id }}
+ triggerJob: ${{ github.event.workflow_run.name }}
+
+jobs:
+ WriteLogs:
+ runs-on: ubuntu-latest
+ if: ${{ github.event.workflow_run.name != 'Send Workflow Logs to Splunk' }}
+
+ steps:
+ - uses: actions/checkout@v2
+
+ - name: Output Job ID
+ run: echo ${{ github.event.workflow_run.id }}
+
+ - name: Send Workflow logs to Splunk
+ if: ${{ always() }}
+ uses: ./.github/actions/log_to_splunk
+ with:
+ splunk-url: ${{ secrets.HEC_URL }}
+ hec-token: ${{ secrets.HEC_TOKEN }}
+ github-token: ${{ secrets.API_TOKEN }}
+ workflowID: ${{ env.triggerID }}
+ source: ${{ env.triggerJob }}
diff --git a/.github/workflows/scorecards-analysis.yml b/.github/workflows/scorecards-analysis.yml
new file mode 100644
index 0000000..8b491e4
--- /dev/null
+++ b/.github/workflows/scorecards-analysis.yml
@@ -0,0 +1,54 @@
+name: Scorecards supply-chain security
+on:
+ # Only the default branch is supported.
+ branch_protection_rule:
+ schedule:
+ - cron: '19 20 * * 2'
+
+
+# Declare default permissions as read only.
+permissions: read-all
+
+jobs:
+ analysis:
+ name: Scorecards analysis
+ runs-on: ubuntu-latest
+ permissions:
+ # Needed to upload the results to code-scanning dashboard.
+ security-events: write
+ actions: read
+ contents: read
+
+ steps:
+ - name: "Checkout code"
+ uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # v2.4.0
+ with:
+ persist-credentials: false
+
+ - name: "Run analysis"
+ uses: ossf/scorecard-action@c8416b0b2bf627c349ca92fc8e3de51a64b005cf # v1.0.2
+ with:
+ results_file: results.sarif
+ results_format: sarif
+ # Read-only PAT token. To create it,
+ # follow the steps in https://github.com/ossf/scorecard-action#pat-token-creation.
+ repo_token: ${{ secrets.SCORECARD_READ_TOKEN }}
+ # Publish the results to enable scorecard badges. For more details, see
+ # https://github.com/ossf/scorecard-action#publishing-results.
+ # For private repositories, `publish_results` will automatically be set to `false`,
+ # regardless of the value entered here.
+ publish_results: true
+
+ # Upload the results as artifacts (optional).
+ - name: "Upload artifact"
+ uses: actions/upload-artifact@82c141cc518b40d92cc801eee768e7aafc9c2fa2 # v2.3.1
+ with:
+ name: SARIF file
+ path: results.sarif
+ retention-days: 5
+
+ # Upload the results to GitHub's code scanning dashboard.
+ - name: "Upload to code-scanning"
+ uses: github/codeql-action/upload-sarif@5f532563584d71fdef14ee64d17bafb34f751ce5 # v1.0.26
+ with:
+ sarif_file: results.sarif
diff --git a/.gitignore b/.gitignore
index 2d3f6b3..11a4e9c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -114,7 +114,6 @@ out
# Nuxt.js build / generate output
.nuxt
-dist
# Gatsby files
.cache/
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..4fc208e
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 Splunk GitHub
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
index a9d6f50..07b4ac2 100644
--- a/README.md
+++ b/README.md
@@ -1,42 +1,44 @@
-# Github App for Splunk
+# GitHub App for Splunk
-The Github App for Splunk is a collection of out of the box dashboards and Splunk knowledge objects designed to give Github Admins and platform owners immediate visibility into Github.
+The GitHub App for Splunk is a collection of out of the box dashboards and Splunk knowledge objects designed to give GitHub Admins and platform owners immediate visibility into GitHub.
-This App is designed to work across multiple Github data sources however not all all required. You may choose to only collect a certain set of data and the parts of this app that utilize that set will function, while those that use other data sources will not function correctly, so please only use the Dashboards that relate to the data you are collecting.
+This App is designed to work across multiple GitHub data sources; however, not all are required. You may choose to only collect a certain set of data and the parts of this app that utilize that set will function, while those that use other data sources will not function correctly, so please only use the Dashboards that relate to the data you are collecting.
-The Github App for Splunk is designed to work with the following data sources:
+The GitHub App for Splunk is designed to work with the following data sources:
-* [Github Audit Log Monitoring Add-On For Splunk](./docs/ghe_audit_logs.md): Audit logs from Github Enterprise Cloud.
-* [Github.com Webhooks](./docs/github_webhooks.md): A select set of webhook events like Push, PullRequest, and Repo.
-* [Github Enterprise Server Syslog Forwarder](https://docs.github.com/en/enterprise-server/admin/user-management/monitoring-activity-in-your-enterprise/log-forwarding): Audit and Application logs from Github Enterprise Server.
-* [Github Enterprise Collectd monitoring](./docs/splunk_collectd_forwarding_for_ghes.md): Performance and Infrastructure metrics from Github Enterprise Server.
+* [GitHub Audit Log Collection](./docs/ghe_audit_logs.MD): Audit logs from GitHub Enterprise Cloud and Server.
+* [Github.com Webhooks](./docs/github_webhooks.MD): A select set of webhook events like Push, PullRequest, Code Scanning and Repo.
+* [Github Enterprise Collectd monitoring](./docs/splunk_collectd_forwarding_for_ghes.MD): Performance and Infrastructure metrics from Github Enterprise Server.
## Dashboard Instructions
### Installation
-The Github App for Splunk is available for download from [Splunkbase](https://splunkbase.splunk.com/app/5596/). For Splunk Cloud, refer to [Install apps in your Splunk Cloud deployment](https://docs.splunk.com/Documentation/SplunkCloud/latest/Admin/SelfServiceAppInstall). For non-Splunk Cloud deployments, refer to the standard methods for Splunk Add-on installs as documented for a [Single Server Install](http://docs.splunk.com/Documentation/AddOns/latest/Overview/Singleserverinstall) or a [Distributed Environment Install](http://docs.splunk.com/Documentation/AddOns/latest/Overview/Distributedinstall).
+The GitHub App for Splunk is available for download from [Splunkbase](https://splunkbase.splunk.com/app/5596/). For Splunk Cloud, refer to [Install apps in your Splunk Cloud deployment](https://docs.splunk.com/Documentation/SplunkCloud/latest/Admin/SelfServiceAppInstall). For non-Splunk Cloud deployments, refer to the standard methods for Splunk Add-on installs as documented for a [Single Server Install](http://docs.splunk.com/Documentation/AddOns/latest/Overview/Singleserverinstall) or a [Distributed Environment Install](http://docs.splunk.com/Documentation/AddOns/latest/Overview/Distributedinstall).
**This app should be installed on both your search head tier as well as your indexer tier.**
-
+
### Configuration

-1. The Github App for Splunk uses macros so that index and `sourcetype` names don't need to be updated in each dashboard panel. You'll need to update the macros to account for your selected indexes.
-1. The macro `github_source` is the macro for all audit log events, whether from Github Enterprise Cloud or Server. The predefined macro includes examples of **BOTH**. Update to account for your specific needs.
+1. The GitHub App for Splunk uses macros so that index and `sourcetype` names don't need to be updated in each dashboard panel. You'll need to update the macros to account for your selected indexes.
+1. The macro `github_source` is the macro for all audit log events, whether from GitHub Enterprise Cloud or Server. The predefined macro includes examples of **BOTH**. Update to account for your specific needs.
1. The macro `github_webhooks` is the macro used for all webhook events. Since it is assuming a single index for all webhook events, that is the predefined example, but update as needed.
-1. Finally, the macro `github_collectd` is the macro used for all `collectd` metrics sent from Github Enterprise Server. Please update accordingly.
+1. Finally, the macro `github_collectd` is the macro used for all `collectd` metrics sent from GitHub Enterprise Server. Please update accordingly.
### Integration Overview dashboard
-There is an *Integration Overview* dashboard listed under *Dashboards* that allows you to monitor API rate limits, audit events fetched, or webhooks received. This dashboard is primarily meant to be used with the `Github Audit Log Monitoring Add-On for Splunk` and uses internal Splunk logs. To be able to view them you will probably need elevated privileges in Splunk that include access to the `_internal` index. Please coordinate with your Splunk team if that dashboard is desired.
+There is an *Integration Overview* dashboard listed under *Dashboards* that allows you to monitor API rate limits, audit events fetched, or webhooks received. This dashboard is primarily meant to be used with the `GitHub Audit Log Monitoring Add-On for Splunk` and uses internal Splunk logs. To be able to view them you will probably need elevated privileges in Splunk that include access to the `_internal` index. Please coordinate with your Splunk team if that dashboard is desired.
### Examples
Expand for screenshots
+#### Code Scanning Alerts
+ 
+
#### Audit Log Dashboard

@@ -59,4 +61,4 @@ There is an *Integration Overview* dashboard listed under *Dashboards* that allo
## Support
-Support for Github App for Splunk is run through [Github Issues](https://github.com/splunk/github_app_for_splunk/issues). Please open a new issue for any support issues or for feature requests. You may also open a Pull Request if you'd like to contribute additional dashboards, eventtypes for webhooks, or enhancements you may have.
+Support for GitHub App for Splunk is run through [GitHub Issues](https://github.com/splunk/github_app_for_splunk/issues). Please open a new issue for any support issues or for feature requests. You may also open a Pull Request if you'd like to contribute additional dashboards, eventtypes for webhooks, or enhancements you may have.
diff --git a/docs/ghe_audit_logs.MD b/docs/ghe_audit_logs.MD
index 60c6a46..34b0819 100644
--- a/docs/ghe_audit_logs.MD
+++ b/docs/ghe_audit_logs.MD
@@ -1,46 +1,13 @@
# GitHub Enterprise Audit Log Monitoring
-> Splunk modular input plugin to fetch the enterprise audit log from GitHub Enterprise
-
-Support for modular inputs in Splunk Enterprise 5.0 and later enables you to add new types of inputs to Splunk Enterprise that are treated as native Splunk Enterprise inputs.
-
-This modular input makes an HTTPS request to the GitHub Enterprise's Audit Log REST API endpoint at a definable interval to fetch audit log data.
-
-
-
## Prerequisites
-- Splunk v7.3.5+
-- Python 2.7+
- - Successfully tested with python 3.x but support is not guaranteed yet
+- Splunk v8.x+
+- Python 3.x
## Installation
-1. SSH to your Splunk server
-
-2. Download the latest release from [Releases](https://github.com/splunk/github-audit-log-monitoring-add-on-for-splunk/releases)
-
-3. Copy the tarball to the apps directory and extract it:
-
- ```sh
- $ cp splunk-ghe-audit-log-monitoring-.tar.gz $SPLUNK_HOME/etc/apps/
-
- $ mkdir -p $SPLUNK_HOME/etc/apps/ghe_audit_log_monitoring
-
- $ tar xf $SPLUNK_HOME/etc/apps/splunk-ghe-audit-log-monitoring-.tar.gz -C $SPLUNK_HOME/etc/apps/ghe_audit_log_monitoring --strip-components=1
-
- # Optional depending on the user executing the previous actions
- $ sudo chown -R splunk:splunk $SPLUNK_HOME/etc/apps/ghe_audit_log_monitoring
-
- # Make the state directory writable by the group
- $ sudo chmod -R 775 /opt/splunk/etc/apps/ghe_audit_log_monitoring/state
- ```
-
-4. Restart the Splunk server
-
-5. Generate a Personal Access Token in GitHub Enterprise with the `site_admin` scope.
-
-6. Configure and the GitHub Enterprise Audit Log Monitoring by entering the necessary information in the input fields
+Installation and configuration documentation for the [Splunk Add-on for GitHub](https://docs.splunk.com/Documentation/AddOns/released/GitHub/About) is available in our official Splunk docs. This add-on can be used for both GitHub Enterprise Cloud and Server. To configure for each specific environment, please refer to the official docs.
## Configuration
@@ -52,6 +19,8 @@ The following are the required scopes for the personal access token allowing the
- [x] manage_billing:enterprise `Read and write enterprise billing data`
- [x] read:enterprise `Read enterprise profile data`
+**The PAT must be generated by someone who is an Enterprise Owner.**
+
### Input Fields

@@ -64,9 +33,12 @@ The following are the required scopes for the personal access token allowing the
- **Hostname**
- - This is the hostname of your GitHub Enterprise instance. Make sure there are no trailing `/` in the URL provided. This could either be a FQDN or an IP address. Do not append any paths beyond the tld.
+ - This is the hostname of your GitHub Enterprise instance. Make sure there are no trailing `/` in the URL provided. This could either be a FQDN or an IP address. Do not append any paths beyond the tld. **Most Users Will Not Need to change this!**
- Example: [https://api.github.com](https://api.github.com)
+- **Account Type**
+ - This is the type of GitHub account you are using. GitHub Enterprise Cloud users should keep it at `enterprise`, however some users that only have an enterprise tier paid Organization should change it to `organization`. If you can't tell which you have, go to your user icon in GitHub in the upper right corner. If you have an entry listed as "Your enterprises", then you should use `enterprise`, otherwise use `organization`.
+
- **Enterprise**
- The enterprise name for which to fetch audit log events
@@ -124,7 +96,7 @@ This modular input fetches events by calling the [Enterprise Audit Log API](http
### Activity dashboard example
-Along with this modular input we're providing a [Github App for Splunk](https://github.com/splunk/github_app_for_splunk) that makes use of the collected audit log events to give you an overview of the activities across your enterprise.
+Along with this modular input we're providing a [GitHub App for Splunk](https://github.com/splunk/github_app_for_splunk) that makes use of the collected audit log events to give you an overview of the activities across your enterprise.
You can install it via the [Manage Apps page](https://docs.splunk.com/Documentation/Splunk/8.2.0/Admin/Deployappsandadd-ons).
@@ -151,30 +123,3 @@ If you've enabled debug mode be ready to change your personal access token becau
### Why can't I use a GitHub app instead of a personal access token?
GitHub apps cannot be installed on the enterprise level. The REST API requires enterprise admin privileges which are out of scope for GitHub apps.
-
-## Troubleshooting
-
-### Read logs in Splunk
-
-You can use this search query to fetch all the logs belonging to this module when **Debug Mode** is enabled.
-
-```sh
-index="_internal" source="/opt/splunk/var/log/splunk/splunkd.log" ghe_audit_log_monitoring
-```
-
-### Test the modular input for syntax problems
-
-Run this test if you don't see anything in the logs (which is a highly unlikely scenario). This will display any syntax errors if there are any.
-
-```sh
-sudo $SPLUNK_HOME/bin/splunk cmd python $SPLUNK_HOME/etc/apps/ghe_audit_log_monitoring/bin/ghe_audit_log_monitoring.py
-```
-
-### Where are state files stored?
-
-State files for enterprises are stored in this directory:
-
-```sh
-$SPLUNK_HOME/etc/apps/ghe_audit_log_monitoring/state/
-```
-Test
diff --git a/docs/ghes_syslog_setup.MD b/docs/ghes_syslog_setup.MD
new file mode 100644
index 0000000..5207419
--- /dev/null
+++ b/docs/ghes_syslog_setup.MD
@@ -0,0 +1,3 @@
+# Sending GitHub Enterprise Server Logs to Splunk
+
+GitHub Enterprise Server comes with syslog-ng built in to send data to platforms like Splunk and we can take advantage of that with the [Splunk Add-on for GitHub](https://splunkbase.splunk.com/app/6254/). Setup details and documentation are available on [Splunk Docs](https://docs.splunk.com/Documentation/AddOns/released/GitHub/About).
diff --git a/docs/github_webhooks.MD b/docs/github_webhooks.MD
index 12ea7d1..cd21373 100644
--- a/docs/github_webhooks.MD
+++ b/docs/github_webhooks.MD
@@ -1,10 +1,10 @@
-# Using Github Webhooks
+# Using GitHub Webhooks
-Github Webhooks are a great way to collect rich information as it occurs. You can easily enable webhooks within the Github UI and can even select specific actions on which to trigger a webhook call to Splunk. This is only available at the Organization level and will require this to be done for each Org as desired. To do so, you'll need to configure Splunk as a receiver and then setup the webhooks within Github.
+GitHub Webhooks are a great way to collect rich information as it occurs. You can easily enable webhooks within the GitHub UI and can even select specific actions on which to trigger a webhook call to Splunk. This is only available at the Organization level and will require this to be done for each Org as desired. To do so, you'll need to configure Splunk as a receiver and then set up the webhooks within GitHub.
## Configuring Splunk to receive Webhooks
-Splunk's HTTP Event Collector (HEC) is a quick and easy endpoint built to receive data from other producers like Github.
+Splunk's HTTP Event Collector (HEC) is a quick and easy endpoint built to receive data from other producers like GitHub.
### Setting Up Splunk to Listen for Webhooks
1. Under Settings > Data Inputs, click **HTTP Event Collector**
@@ -13,19 +13,19 @@ Splunk's HTTP Event Collector (HEC) is a quick and easy endpoint built to receiv
1. Unless required by your Splunk administrator, the rest of this page can be left as is and continue onto the next step.
1. You'll want to click `select` for Source Type, and a new selection box will appear below that.
1. Under the Application option, there should be an entry for `github_json`, however you may need to use the little search bar to find it.
-1. For App Context, you'll want to select **Splunk App for Github**
+1. For App Context, you'll want to select **GitHub App for Splunk**
1. Next select the index created for this data. If none exist, create a new Index. Names like `github` or the like are recommended, depending on corporate naming conventions.
1. Lastly, click the Review button and confirm the data is correct and hit Submit.
Your token is now available to collect data, however we'll need to enable that token to allow Query String Authentication using that token. For this, you'll need command line access to your Splunk environment or be using a deployment server to deploy apps to Splunk.
-To enable Query String Authentication, you'll need to update the `inputs.conf` file within the Splunk App for Github local directory. In that file, there will be a stanza with the name and value of the token you created. At the end of that stanza, you'll need to add `allowQueryStringAuth = true` and then restart Splunk. This is best done with the help of your Splunk team, so please reach out to them for assistance on this step.
+To enable Query String Authentication, you'll need to update the `inputs.conf` file within the GitHub App for Splunk local directory. In that file, there will be a stanza with the name and value of the token you created. At the end of that stanza, you'll need to add `allowQueryStringAuth = true` and then restart Splunk. This is best done with the help of your Splunk team, so please reach out to them for assistance on this step.
-### Setting Up Github Webhooks
+### Setting Up GitHub Webhooks
Webhooks are a simple push mechanism that will send an event each time the webhook is triggered. Unfortunately, Webhooks are unique to each Organization and will need to be setup for each Org as desired. To do this, a user will need to be an Admin for the Org.
-1. In your Github Organization Settings page, select Webhooks from the menu on the left.
+1. In your GitHub Organization Settings page, select Webhooks from the menu on the left.
1. On this page, you'll see all the existing Webhooks, click the **Add webhook** button to add one to send data to Splunk.
1. The Payload URL will be the Splunk HTTP Event Collector endpoint that was enabled above. It should look something like: `https://YOUR SPLUNK URL:8088/services/collector/raw?token=THE TOKEN FROM ABOVE`. The default port of 8088 may be different for your Splunk Environment, so please confirm the HEC port with your Splunk Admin team.
1. For Content Type, you'll want to select `application/json` as the best option.
@@ -41,27 +41,42 @@ Once that is complete and webhooks are triggering, you'll want to update the mac
Pull request opened, closed, reopened, edited, assigned, unassigned, review requested, review request removed, labeled, unlabeled, synchronized, ready for review, converted to draft, locked, unlocked, auto merge enabled, auto merge disabled, milestoned, or demilestoned.
-
Github::PullRequest::Review
+
GitHub::PullRequest::Review
Pull request reviews
Pull request review submitted, edited, or dismissed.
+
+
GitHub::CodeScanning
+
Code scanning alerts
+
Alerts identified by CodeQL and other 3rd party/OSS scanning tools.
+
+
+
GitHub::VulnerabilityAlert
+
Repository vulnerability alerts
+
Dependabot alert (aka dependency vulnerability alert) created, resolved, or dismissed on a repository.
+
+
+
GitHub::SecretScanning
+
Secret scanning alerts
+
Secrets scanning alert created, resolved, or reopened.
+
diff --git a/docs/images/79E9DCE3-B358-4BAC-9667-7866C2CE4D00.png b/docs/images/79E9DCE3-B358-4BAC-9667-7866C2CE4D00.png
index d9933d9..188b3e3 100644
Binary files a/docs/images/79E9DCE3-B358-4BAC-9667-7866C2CE4D00.png and b/docs/images/79E9DCE3-B358-4BAC-9667-7866C2CE4D00.png differ
diff --git a/docs/images/code_scanning_dashboard.png b/docs/images/code_scanning_dashboard.png
new file mode 100644
index 0000000..92135b9
Binary files /dev/null and b/docs/images/code_scanning_dashboard.png differ
diff --git a/docs/images/screenshots/BETA_GHES_Alerts.png b/docs/images/screenshots/BETA_GHES_Alerts.png
new file mode 100644
index 0000000..301d582
Binary files /dev/null and b/docs/images/screenshots/BETA_GHES_Alerts.png differ
diff --git a/docs/images/screenshots/BETA_authentication_dashboard.png b/docs/images/screenshots/BETA_authentication_dashboard.png
new file mode 100644
index 0000000..4113111
Binary files /dev/null and b/docs/images/screenshots/BETA_authentication_dashboard.png differ
diff --git a/docs/images/screenshots/BETA_security_alert_dashboard.png b/docs/images/screenshots/BETA_security_alert_dashboard.png
new file mode 100644
index 0000000..5c74c6a
Binary files /dev/null and b/docs/images/screenshots/BETA_security_alert_dashboard.png differ
diff --git a/docs/images/screenshots/BETA_value_stream_dashboard.png b/docs/images/screenshots/BETA_value_stream_dashboard.png
new file mode 100644
index 0000000..fe4a464
Binary files /dev/null and b/docs/images/screenshots/BETA_value_stream_dashboard.png differ
diff --git a/docs/images/screenshots/BETA_workflow_analytics_dashboard.png b/docs/images/screenshots/BETA_workflow_analytics_dashboard.png
new file mode 100644
index 0000000..f8e3ac8
Binary files /dev/null and b/docs/images/screenshots/BETA_workflow_analytics_dashboard.png differ
diff --git a/docs/images/screenshots/process_monitor_dashboard.png b/docs/images/screenshots/process_monitor_dashboard.png
new file mode 100644
index 0000000..51532c9
Binary files /dev/null and b/docs/images/screenshots/process_monitor_dashboard.png differ
diff --git a/docs/images/screenshots/system_health_dashboard.png b/docs/images/screenshots/system_health_dashboard.png
new file mode 100644
index 0000000..e224a86
Binary files /dev/null and b/docs/images/screenshots/system_health_dashboard.png differ
diff --git a/docs/splunk_collectd_forwarding_for_ghes.MD b/docs/splunk_collectd_forwarding_for_ghes.MD
index 0273458..d491b05 100644
--- a/docs/splunk_collectd_forwarding_for_ghes.MD
+++ b/docs/splunk_collectd_forwarding_for_ghes.MD
@@ -1,4 +1,4 @@
-# Splunk Collectd Forwarding for Github Enterprise Server
+# Splunk Collectd Forwarding for GitHub Enterprise Server
This guide describes how to enable collectd forwarding on GitHub Enterprise Server (GHES) using Splunk Enterprise (v8.0+).
diff --git a/docs/value_stream_stages.MD b/docs/value_stream_stages.MD
new file mode 100644
index 0000000..2f1f3f7
--- /dev/null
+++ b/docs/value_stream_stages.MD
@@ -0,0 +1,90 @@
+# Value Stream Mapping for DevSecOps
+
+To better understand the flow of work from ideation to release, we can define a map of the steps that work must go through.
+
+1. **Ideation** - Creating the initial idea for the work
+1. **Acknowledgement** - Accepting the work and planning for it
+1. **Working** - Working on the tasks
+1. **Merge** - Completing the work
+1. **Review** - Reviewing the completed work
+1. **Testing** - Testing the work and preparing for release
+1. **Pending** - Waiting for release
+1. **Released** - Work is released
+
+All work should fit into one of these categories and should reference an id for the Issue/Card/Task.
+
+## Stage Definitions
+
+### Ideation
+
+This is the initial request for work, typically a task of some kind in a project management system.
+
+These are the data types that would define this phase:
+- Issue creation
+- Project Card creation
+
+### Acknowledgement
+
+This is the phase where an idea has gone from the backlog, to a planning phase in a project management system.
+
+These are the data types that would define this phase:
+- Issue labeled as "to do"
+- Issue assigned a sprint/milestone
+- Project Card moved to a "to do" column
+
+### Working
+
+This is the phase where the idea becomes reality and is typically when code is being written.
+
+These are the data types that would define this phase:
+- Issue labeled as "in progress"
+- Project Card moved to an "in progress" column
+- Branch created
+- First Push to main branch
+
+### Merge
+
+This is the phase where work on the task has been completed.
+
+These are the data types that would define this phase:
+- Issue labeled as "Done"
+- Project Card moved to a "Done" column
+- Pull Request created
+- Last Push to main branch
+
+### Review
+
+This is the phase where a code change would be under review. The change would be complete but not ready for testing or release. This may or may not include CI testing or manual code reviews.
+
+These are the data types that would define this phase:
+- Issue labeled as "under review"
+- Project Card moved to a "under review" column
+- Pipeline/Workflow execution on a Pull Request begins
+- Pull Request created
+
+### Testing
+
+This is the phase where code is undergoing automated testing through a CI process. This is distinct from the Review phase as it should be part of the release testing, not merge testing. This data is typically found within the CI tools being used.
+
+These are the data types that would define this phase:
+- Pipeline execution starts/ends
+
+### Pending
+
+This is the phase where code has been created, reviewed, tested, and approved and is typically measured more by the separation of events than any specific data point.
+
+These are the data types that would define this phase:
+- Pipeline execution complete
+- Deployment started
+
+### Released
+
+This is the phase where the task is complete and is being used.
+
+These are the data types that would define this phase:
+- Issue closed
+- Project Card moved to "released" column
+- Kubernetes deployment
+- Artifact uploaded
+- Container pushed to registry
+- Other customer specific markers
diff --git a/github_app_for_splunk/README.md b/github_app_for_splunk/README.md
index e36796a..d6c1918 100644
--- a/github_app_for_splunk/README.md
+++ b/github_app_for_splunk/README.md
@@ -1,30 +1,30 @@
-# Github App for Splunk
+# GitHub App for Splunk
-The Github App for Splunk is a collection of out of the box dashboards and Splunk knowledge objects designed to give Github Admins and platform owners immediate visibility into Github.
+The GitHub App for Splunk is a collection of out of the box dashboards and Splunk knowledge objects designed to give GitHub Admins, platform owners, and Security Engineers immediate visibility into GitHub.
-This App is designed to work across multiple Github data sources however not all all required. You may choose to only collect a certain set of data and the parts of this app that utilize that set will function, while those that use other data sources will not function correctly, so please only use the Dashboards that relate to the data you are collecting.
+This App is designed to work across multiple GitHub data sources; however, not all are required. You may choose to only collect a certain set of data and the parts of this app that utilize that set will function, while those that use other data sources will not function correctly, so please only use the Dashboards that relate to the data you are collecting.
-The Github App for Splunk is designed to work with the following data sources:
+The GitHub App for Splunk is designed to work with the following data sources:
-* [Github Audit Log Monitoring Add-On For Splunk](./docs/ghe_audit_logs.MD): Audit logs from Github Enterprise Cloud.
-* [Github.com Webhooks]((./docs/github_webhooks.MD)): A select set of webhook events like Push, PullRequest, and Repo.
-* [Github Enterprise Server Syslog Forwarder](https://docs.github.com/en/enterprise-server@3.0/admin/user-management/monitoring-activity-in-your-enterprise/log-forwarding): Audit and Application logs from Github Enterprise Server.
-* [Github Enterprise Collectd monitoring](./docs/splunk_collectd_forwarding_for_ghes.MD): Performance and Infrastructure metrics from Github Enterprise Server.
+* [GitHub Audit Log Monitoring Add-On For Splunk](./docs/ghe_audit_logs.MD): Audit logs from GitHub Enterprise Cloud.
+* [Github.com Webhooks](./docs/github_webhooks.MD): A select set of webhook events like Push, PullRequest, Repo, and Code Scanning alerts.
+* [GitHub Enterprise Server Syslog Forwarder](https://docs.github.com/en/enterprise-server@3.0/admin/user-management/monitoring-activity-in-your-enterprise/log-forwarding): Audit and Application logs from GitHub Enterprise Server.
+* [GitHub Enterprise Collectd monitoring](./docs/splunk_collectd_forwarding_for_ghes.MD): Performance and Infrastructure metrics from GitHub Enterprise Server.
## Dashboard Instructions
-The Github App for Splunk is available for download from [Splunkbase](https://splunkbase.splunk.com/app/5596/). Once installed there are a couple steps needed to light up all the dashboards.
+The GitHub App for Splunk is available for download from [Splunkbase](https://splunkbase.splunk.com/app/5596/). Once installed there are a couple steps needed to light up all the dashboards.

-1. The Github App for Splunk uses macros so that index and sourcetype names don't need to be updated in each dashboard panel. You'll need to update the macros to account for your selected indexes.
-1. The macro `github_source` is the macro for all audit log events, whether from Github Enterprise Cloud or Server. The predefined maco includes examples of **BOTH**. Update to account for your specific needs.
+1. The GitHub App for Splunk uses macros so that index and sourcetype names don't need to be updated in each dashboard panel. You'll need to update the macros to account for your selected indexes.
+1. The macro `github_source` is the macro for all audit log events, whether from GitHub Enterprise Cloud or Server. The predefined macro includes examples of **BOTH**. Update to account for your specific needs.
1. The macro `github_webhooks` is the macro used for all webhook events. Since it is assuming a single index for all webhook events, that is the predefined example, but update as needed.
-1. Finally, the macro `github_collectd` is the macro used for all collectd metrics sent from Github Enterprise Server. Please update accordingly.
+1. Finally, the macro `github_collectd` is the macro used for all collectd metrics sent from GitHub Enterprise Server. Please update accordingly.
### Integration Overview dashboard
-There is an *Integration Overview* dashboard listed under *Dashboards* that allows you to monitor API rate limits, audit events fetched, or webhooks received. This dashboard is primarily meant to be used with the `Github Audit Log Monitoring Add-On for Splunk` and uses internal Splunk logs. To be able to view them you will probably need elevated privileges in Splunk that include access to the `_internal` index. Please coordinate with your Splunk team if that dashboard is desired.
+There is an *Integration Overview* dashboard listed under *Dashboards* that allows you to monitor API rate limits, audit events fetched, or webhooks received. This dashboard is primarily meant to be used with the `GitHub Audit Log Monitoring Add-On for Splunk` and uses internal Splunk logs. To be able to view them you will probably need elevated privileges in Splunk that include access to the `_internal` index. Please coordinate with your Splunk team if that dashboard is desired.
## Support
-Support for Github App for Splunk is run through [Github Issues](https://github.com/splunk/github_app_for_splunk/issues). Please open a new issue for any support issues or for feature requests. You may also open a Pull Request if you'd like to contribute additional dashboards, eventtypes for webhooks, or enhancements you may have.
+Support for GitHub App for Splunk is run through [GitHub Issues](https://github.com/splunk/github_app_for_splunk/issues). Please open a new issue for any support issues or for feature requests. You may also open a Pull Request if you'd like to contribute additional dashboards, eventtypes for webhooks, or enhancements you may have.
diff --git a/github_app_for_splunk/appserver/static/custom.css b/github_app_for_splunk/appserver/static/custom.css
new file mode 100644
index 0000000..697e6d4
--- /dev/null
+++ b/github_app_for_splunk/appserver/static/custom.css
@@ -0,0 +1,24 @@
+/* custom.css */
+
+/* Define icon styles */
+
+td.icon {
+ text-align: center;
+}
+
+td.icon i {
+ font-size: 30px;
+ text-shadow: 1px 1px #aaa;
+}
+
+td.icon .failure {
+ color: red;
+}
+
+td.icon .in_progress {
+ color: yellow;
+}
+
+td.icon .success {
+ color: green;
+}
diff --git a/github_app_for_splunk/appserver/static/example_customtables.js b/github_app_for_splunk/appserver/static/example_customtables.js
new file mode 100644
index 0000000..4ad3951
--- /dev/null
+++ b/github_app_for_splunk/appserver/static/example_customtables.js
@@ -0,0 +1,132 @@
+require([
+ "underscore",
+ "splunkjs/mvc",
+ "splunkjs/mvc/searchmanager",
+ "splunkjs/mvc/tableview",
+ "splunkjs/mvc/simplexml/ready!"
+], function(
+ _,
+ mvc,
+ SearchManager,
+ TableView
+) {
+
+ mvc.Components.revokeInstance("myCustomRowSearch");
+
+ // Set up search managers
+ var myCustomRowSearch = new SearchManager({
+ id: "myCustomRowSearch",
+ preview: true,
+ cache: true,
+ search: "`github_webhooks` \"workflow_run.name\"=\"*\" | spath \"repository.full_name\" | search repository.full_name=* | eval started=if(action=\"requested\",_time,NULL), completed=if(action=\"completed\",_time, NULL), created=round(strptime('workflow_run.created_at',\"%Y-%m-%dT%H:%M:%SZ\")) | stats latest(created) as created, latest(started) as started, latest(completed) as completed, latest(duration) as duration, latest(workflow_run.conclusion) as workflow_run.conclusion by repository.full_name,workflow_run.name,workflow_run.id | eval started=if(isnull(started), created, started) | eval duration=if(isnotnull(completed),tostring(completed-started,\"Duration\"),\"In Progress\") | rename workflow_run.conclusion as status, repository.full_name as \"Repository Name\", workflow_run.name as \"Workflow Name\", workflow_run.id as \"Run ID\" | table status, \"Repository Name\", \"Workflow Name\", \"Run ID\", duration,completed|sort completed|fields - completed",
+ earliest_time: mvc.tokenSafe("$timeTkn.earliest$"),
+ latest_time: mvc.tokenSafe("$timeTkn.latest$")
+ });
+
+ // Create a table for a custom row expander
+ var mycustomrowtable = new TableView({
+ id: "table-customrow",
+ managerid: "myCustomRowSearch",
+ drilldown: "none",
+ drilldownRedirect: false,
+ el: $("#table-customrow")
+ });
+
+ // Define icons for the custom table cell
+ var ICONS = {
+ failure: "error",
+ in_progress: "question-circle",
+ success: "check-circle"
+ };
+
+ // Use the BaseCellRenderer class to create a custom table cell renderer
+ var CustomCellRenderer = TableView.BaseCellRenderer.extend({
+ canRender: function(cellData) {
+ // This method returns "true" for the "range" field
+ return cellData.field === "status";
+ },
+
+ // This render function only works when canRender returns "true"
+ render: function($td, cellData) {
+ console.log("cellData: ", cellData);
+
+ var icon = "question";
+ if(ICONS.hasOwnProperty(cellData.value)) {
+ icon = ICONS[cellData.value];
+ }
+ $td.addClass("icon").html(_.template('', {
+ icon: icon,
+ status: cellData.value
+ }));
+ }
+ });
+
+ // Use the BasicRowRenderer class to create a custom table row renderer
+ var CustomRowRenderer = TableView.BaseRowExpansionRenderer.extend({
+ canRender: function(rowData) {
+ console.log("RowData: ", rowData);
+ return true;
+ },
+
+ initialize: function(args){
+ this._searchManager = new SearchManager({
+ id: 'details-search-manager',
+ preview: false
+ });
+ this._TableView = new TableView({
+ id: 'ResultsTable',
+ managerid: 'details-search-manager',
+ drilldown: "all",
+ drilldownRedirect: true
+ });
+ },
+
+ render: function($container, rowData) {
+ // Print the rowData object to the console
+ console.log("RowData: ", rowData);
+
+ var repoNameCell = _(rowData.cells).find(function (cell) {
+ return cell.field === 'Repository Name';
+ });
+
+
+ var workflowName = _(rowData.cells).find(function (cell) {
+ return cell.field === 'Workflow Name';
+ });
+
+ var workflowIDCell = _(rowData.cells).find(function (cell) {
+ return cell.field === 'Run ID';
+ });
+
+ this._TableView.on("click", function(e) {
+ e.preventDefault();
+ console.log(e);
+ window.open("/app/github_app_for_splunk/workflow_details?form.workflow_id="+workflowIDCell.value+"&form.repoName="+repoNameCell.value+"&form.workflowName="+workflowName.value+"&form.field1.earliest=-24h%40h&form.field1.latest=now&form.timeRange.earliest=-30d%40d&form.timeRange.latest=now&form.workflowCount=25",'_self');
+ });
+
+ this._searchManager.set({ search: '`github_webhooks` (workflow_run.id='+workflowIDCell.value+' OR workflow_job.run_id='+workflowIDCell.value+') | eval started=if(action=="requested", _time, null), completed=if(action=="completed", _time,null) | stats latest(workflow_run.conclusion) as Status, earliest(started) as Started, latest(completed) as Completed, latest(workflow_run.head_branch) as Branch, latest(workflow_run.event) as Trigger | eval Duration=tostring(Completed-Started, "Duration") | eval Started=strftime(Started,"%Y-%m-%dT%H:%M:%S"), Completed=strftime(Completed,"%Y-%m-%dT%H:%M:%S")| fields Status, Started, Completed, Duration, Branch, Trigger | eval Details="Click here for Workflow Details" | transpose|rename column AS Details| rename "row 1" AS values'});
+ // $container is the jquery object where we can put out content.
+ // In this case we will render our chart and add it to the $container
+ $container.append(this._TableView.render().el);
+ }
+ });
+
+ // Create an instance of the custom cell renderer,
+ // add it to the table, and render the table
+ var myCellRenderer = new CustomCellRenderer();
+ mycustomrowtable.addCellRenderer(myCellRenderer);
+ mycustomrowtable.render();
+
+ // Create an instance of the custom row renderer,
+ // add it to the table, and render the table
+ var myRowRenderer = new CustomRowRenderer();
+ mycustomrowtable.addRowExpansionRenderer(myRowRenderer);
+ mycustomrowtable.render();
+
+ mycustomrowtable.on("click", function(e) {
+ e.preventDefault();
+ console.log(e.data);
+ window.open("/app/github_app_for_splunk/workflow_details?form.repoName="+e.data["row.repository.full_name"]+"&form.workflowName="+e.data["row.workflow_job.name"]+"&form.field1.earliest=-24h%40h&form.field1.latest=now&form.timeRange.earliest=-30d%40d&form.timeRange.latest=now&form.workflowCount=25",'_blank');
+ });
+
+});
diff --git a/github_app_for_splunk/appserver/static/tabs.css b/github_app_for_splunk/appserver/static/tabs.css
index 3e61043..4d7705b 100644
--- a/github_app_for_splunk/appserver/static/tabs.css
+++ b/github_app_for_splunk/appserver/static/tabs.css
@@ -17,3 +17,26 @@
border-top: 0px;
}
+/**
+ * This fixes the issue where the tab focus looks weird.
+ */
+
+.nav-tabs {
+ background: #212527;
+}
+
+.nav-tabs > li > a {
+ color: #FFF;
+}
+
+.nav-tabs > li > a:focus {
+ box-shadow: none !important;
+}
+
+.nav-tabs > li:focus-within:after {
+ box-shadow: inset -2px 2px 3px rgba(82, 168, 236, .5);
+}
+
+.nav-tabs > li:focus-within:before {
+ box-shadow: inset 3px 2px 3px rgba(82, 168, 236, .5);
+}
diff --git a/github_app_for_splunk/appserver/static/tabs.js b/github_app_for_splunk/appserver/static/tabs.js
index 32bea79..1858339 100644
--- a/github_app_for_splunk/appserver/static/tabs.js
+++ b/github_app_for_splunk/appserver/static/tabs.js
@@ -1,178 +1,241 @@
require(['jquery','underscore','splunkjs/mvc', 'bootstrap.tab', 'splunkjs/mvc/simplexml/ready!'],
- function($, _, mvc){
-
- var tabsInitialzed = [];
-
- /**
- * The below defines the tab handling logic.
- */
-
- // The normal, auto-magical Bootstrap tab processing doesn't work for us since it requires a particular
- // layout of HTML that we cannot use without converting the view entirely to simpleXML. So, we are
- // going to handle it ourselves.
- var hideTabTargets = function(){
-
- var tabs = $('a[data-elements]');
-
- // Go through each toggle tab
- for(var c = 0; c < tabs.length; c++){
-
- // Hide the targets associated with the tab
- var targets = $(tabs[c]).data("elements").split(",");
-
- for(var d = 0; d < targets.length; d++){
- $('#' + targets[d], this.$el).hide();
- }
- }
- };
-
- var rerenderPanels = function(row_id, force){
-
- // Set a default argument for dont_rerender_until_needed
- if( typeof force === 'undefined'){
- force = true;
- }
-
- // Don't do both if the panel was already rendered
- if( !force && _.contains(tabsInitialzed, row_id) ){
- return;
- }
-
- // Get the elements so that we can find the components to re-render
- var elements = $('#' + row_id + ' .dashboard-element');
-
- // Iterate the list and re-render the components so that they fill the screen
- for(var d = 0; d < elements.length; d++){
-
- // Determine if this is re-sizable
- if( $('#' + row_id + ' .ui-resizable').length > 0){
-
- var component = mvc.Components.get(elements[d].id);
-
- if(component){
- component.render();
- }
- }
- }
-
- // Remember that we initialized this tab
- tabsInitialzed.push(row_id);
- };
-
- var selectTab = function (e) {
-
- // Stop if the tabs have no elements
- if( $(e.target).data("elements") === undefined ){
- console.warn("Yikes, the clicked tab has no elements to hide!");
- return;
- }
-
- // Get the IDs that we should enable for this tab
- var toToggle = $(e.target).data("elements").split(",");
-
- // Hide the tab content by default
- hideTabTargets();
-
- // Now show this tabs toggle elements
- for(var c = 0; c < toToggle.length; c++){
-
- // Show the items
- $('#' + toToggle[c], this.$el).show();
-
- // Re-render the panels under the item if necessary
- rerenderPanels(toToggle[c]);
- }
-
- };
-
- // Wire up the function to show the appropriate tab
- $('a[data-toggle="tab"]').on('shown', selectTab);
-
- // Show the first tab
- $('.toggle-tab').first().trigger('shown');
-
- // Make the tabs into tabs
- $('#tabs', this.$el).tab();
-
+ function($, _, mvc){
+
+ var tabsInitialzed = [];
+
+ /**
+ * The below defines the tab handling logic.
+ */
+
+ /**
+ * This hides the content associated with the tabs.
+ *
+ * The normal, auto-magical Bootstrap tab processing doesn't work for us since it requires a particular
+ * layout of HTML that we cannot use without converting the view entirely to simpleXML. So, we are
+ * going to handle it ourselves.
+ * @param {string} tabSetClass The class identifying the set of tabs whose targets should be hidden (optional; all tabs if omitted)
+ */
+ var hideTabTargets = function(tabSetClass) {
+
+ var tabs = $('a[data-elements]');
+
+ // If we are only applying this to a particular set of tabs, then limit the selector accordingly
+ if (typeof tabSetClass !== 'undefined' && tabSetClass) {
+ tabs = $('a.' + tabSetClass + '[data-elements]');
+ }
+
+ // Go through each toggle tab
+ for (var c = 0; c < tabs.length; c++) {
+
+ // Hide the targets associated with the tab
+ var targets = $(tabs[c]).data("elements").split(",");
+
+ for (var d = 0; d < targets.length; d++) {
+ $('#' + targets[d], this.$el).hide();
+ }
+ }
+ };
+
+ /**
+ * Force a re-render of the panels with the given row ID.
+ *
+ * @param {string} row_id The ID of the row to force a rerender on
+ * @param {boolean} force Force the tab to re-render even if it was already rendered once (defaults to true)
+ */
+ var rerenderPanels = function(row_id, force){
+
+ // Set a default value for the force argument
+ if( typeof force === 'undefined'){
+ force = true;
+ }
+
+ // Don't do both if the panel was already rendered
+ if( !force && _.contains(tabsInitialzed, row_id) ){
+ return;
+ }
+
+ // Get the elements so that we can find the components to re-render
+ var elements = $('#' + row_id + ' .dashboard-element');
+
+ // Iterate the list and re-render the components so that they fill the screen
+ for(var d = 0; d < elements.length; d++){
+
+ // Determine if this is re-sizable
+ if( $('#' + row_id + ' .ui-resizable').length > 0){
+
+ var component = mvc.Components.get(elements[d].id);
+
+ if(component){
+ component.render();
+ }
+ }
+ }
+
+ // Remember that we initialized this tab
+ tabsInitialzed.push(row_id);
+ };
+
+ /**
+ * Handles the selection of a particular tab.
+ *
+ * @param {*} e
+ */
+ var selectTab = function (e) {
+ console.log("selectTab");
+ // Update which tab is considered active
+ $('#tabs > li.active').removeClass("active");
+ $(e.target).closest("li").addClass("active");
+
+ // clearTabControlTokens();
+ setActiveTabToken();
+
+ // Stop if the tabs have no elements
+ if( $(e.target).data("elements") === undefined ){
+ console.warn("Yikes, the clicked tab has no elements to hide!");
+ return;
+ }
+
+ // Determine if the set of tabs has a restriction on the classes to manipulate
+ var tabSet = null;
+
+ if ($(e.target).data("tab-set") !== undefined) {
+ tabSet = $(e.target).data("tab-set");
+ }
+
+ // Get the IDs that we should enable for this tab
+ var toToggle = $(e.target).data("elements").split(",");
+
+ // Hide the tab content by default
+ hideTabTargets(tabSet);
+
+ // Now show this tabs toggle elements
+ for(var c = 0; c < toToggle.length; c++){
+
+ // Show the items
+ $('#' + toToggle[c], this.$el).show();
+
+ // Re-render the panels under the item if necessary
+ rerenderPanels(toToggle[c]);
+ }
+
+ };
+
/**
* The code below handles the tokens that trigger when searches are kicked off for a tab.
*/
-
- // Get the tab token for a given tab name
+
+ /**
+ * Get the tab token for a given tab name
+ * @param {string} tab_name The name of the tab
+ */
var getTabTokenForTabName = function(tab_name){
- return tab_name; //"tab_" +
- }
-
+ return tab_name;
+ };
+
// Get all of the possible tab control tokens
var getTabTokens = function(){
- var tabTokens = [];
-
- var tabLinks = $('#tabs > li > a');
-
- for(var c = 0; c < tabLinks.length; c++){
- tabTokens.push( getTabTokenForTabName( $(tabLinks[c]).data('token') ) );
- }
-
- return tabTokens;
- }
-
- // Clear all but the active tab control tokens
+ var tabTokens = [];
+
+ var tabLinks = $('#tabs > li > a');
+
+ for(var c = 0; c < tabLinks.length; c++){
+ tabTokens.push( getTabTokenForTabName( $(tabLinks[c]).data('token') ) );
+ }
+
+ return tabTokens;
+ };
+
+ /**
+ * Clear all but the active tab control tokens
+ */
var clearTabControlTokens = function(){
- console.info("Clearing tab control tokens");
-
- //tabsInitialzed = [];
- var tabTokens = getTabTokens();
- var activeTabToken = getActiveTabToken();
- var tokens = mvc.Components.getInstance("submitted");
-
- // Clear the tokens for all tabs except for the active one
- for(var c = 0; c < tabTokens.length; c++){
-
- if( activeTabToken !== tabTokens[c] ){
- tokens.set(tabTokens[c], undefined);
- }
- }
- }
-
- // Get the tab control token for the active tab
+ console.info("Clearing tab control tokens");
+
+ //tabsInitialzed = [];
+ var tabTokens = getTabTokens();
+ var activeTabToken = getActiveTabToken();
+ var tokens = mvc.Components.getInstance("submitted");
+
+ // Clear the tokens for all tabs except for the active one
+ for(var c = 0; c < tabTokens.length; c++){
+
+ if( activeTabToken !== tabTokens[c] ){
+ tokens.set(tabTokens[c], undefined);
+ }
+ }
+ };
+
+ /**
+ * Get the tab control token for the active tab
+ */
var getActiveTabToken = function(){
- return $('#tabs > li.active > a').data('token');
- }
-
- // Set the token for the active tab
+ return $('#tabs > li.active > a').data('token');
+ };
+
+ /**
+ * Set the token for the active tab
+ */
var setActiveTabToken = function(){
- var activeTabToken = getActiveTabToken();
-
- var tokens = mvc.Components.getInstance("submitted");
-
- tokens.set(activeTabToken, '');
- }
-
+ var activeTabToken = getActiveTabToken();
+ var tokens = mvc.Components.getInstance("submitted");
+
+ if(activeTabToken){
+ // Set each token if necessary
+ activeTabToken.split(",").forEach(function(token){
+
+ // If the token wasn't set, set it so that the searches can run
+ if(!tokens.toJSON()[token] || tokens.toJSON()[token] == undefined){
+ tokens.set(token, "");
+ }
+ });
+ }
+ };
+
+ /**
+ * Handle the setting of the token for the clicked tab.
+ * @param {*} e
+ */
var setTokenForTab = function(e){
-
- // Get the token for the tab
- var tabToken = getTabTokenForTabName($(e.target).data('token'));
-
- // Set the token
- var tokens = mvc.Components.getInstance("submitted");
- tokens.set(tabToken, '');
-
- console.info("Set the token for the active tab (" + tabToken + ")");
- }
-
- $('a[data-toggle="tab"]').on('shown', setTokenForTab);
-
- // Wire up the tab control tokenization
- var submit = mvc.Components.get("submit");
-
- if( submit ){
- submit.on("submit", function() {
- clearTabControlTokens();
- });
- }
-
- // Set the token for the selected tab
- setActiveTabToken();
-
-});
+ // Get the token for the tab
+ var tabToken = getTabTokenForTabName($(e.target).data('token'));
+
+ // Set the token
+ var tokens = mvc.Components.getInstance("submitted");
+ tokens.set(tabToken, '');
+
+ console.info("Set the token for the active tab (" + tabToken + ")");
+ };
+
+ /**
+ * Perform the initial setup for making the tabs work.
+ */
+ var firstTimeTabSetup = function() {
+ $('a.toggle-tab').on('shown', setTokenForTab);
+
+ // Wire up the function to show the appropriate tab
+ $('a.toggle-tab').on('click shown', selectTab);
+
+ // Show the first tab in each tab set
+ $.each($('.nav-tabs'), function(index, value) {
+ $('.toggle-tab', value).first().trigger('shown');
+ });
+
+ // Make the tabs into tabs
+ $('#tabs', this.$el).tab();
+
+ // Wire up the tab control tokenization
+ var submit = mvc.Components.get("submit");
+
+ if(submit){
+ submit.on("submit", function() {
+ clearTabControlTokens();
+ });
+ }
+
+ // Set the token for the selected tab
+ setActiveTabToken();
+ };
+
+ firstTimeTabSetup();
+});
diff --git a/github_app_for_splunk/appserver/static/workflowdetails.js b/github_app_for_splunk/appserver/static/workflowdetails.js
new file mode 100644
index 0000000..d9498f4
--- /dev/null
+++ b/github_app_for_splunk/appserver/static/workflowdetails.js
@@ -0,0 +1,118 @@
+require([
+ "underscore",
+ "splunkjs/mvc",
+ "splunkjs/mvc/searchmanager",
+ "splunkjs/mvc/tableview",
+ "splunkjs/mvc/simplexml/ready!"
+], function(
+ _,
+ mvc,
+ SearchManager,
+ TableView
+) {
+
+ // Set up search managers
+ var search2 = new SearchManager({
+ id: "workflow_details",
+ preview: true,
+ cache: true,
+ search: mvc.tokenSafe("`github_webhooks` eventtype=\"GitHub::Workflow\" \"workflow_job.run_id\"=$workflow_id$| fields * | eval queued=if(action==\"queued\",_time,null), started=if(action==\"in_progress\",_time,null), completed=if(action==\"completed\",_time,null) | stats latest(workflow_job.conclusion) as status, latest(workflow_job.name) as Name, latest(queued) as queued, latest(started) as started, latest(completed) as completed by workflow_job.id | eval queueTime=toString(round(started-queued),\"Duration\"), runTime=toString(round(completed-started),\"Duration\"), totalTime=toString(round(completed-queued),\"Duration\"), status=if(status==\"null\",\"in_progress\",status) | rename workflow_job.id AS JobID | fields status, Name, JobID, queueTime, runTime, totalTime"),
+ earliest_time: mvc.tokenSafe("$timeTkn.earliest$"),
+ latest_time: mvc.tokenSafe("$timeTkn.latest$")
+ });
+
+ // Create a table for a custom row expander
+ var mycustomrowtable = new TableView({
+ id: "table-customrow",
+ managerid: "workflow_details",
+ drilldown: "none",
+ drilldownRedirect: false,
+ el: $("#table-customrow")
+ });
+
+ // Define icons for the custom table cell
+ var ICONS = {
+ failure: "error",
+ in_progress: "question-circle",
+ success: "check-circle"
+ };
+
+ // Use the BaseCellRenderer class to create a custom table cell renderer
+ var CustomCellRenderer = TableView.BaseCellRenderer.extend({
+ canRender: function(cellData) {
+ // This method returns "true" for the "range" field
+ return cellData.field === "status";
+ },
+
+ // This render function only works when canRender returns "true"
+ render: function($td, cellData) {
+ console.log("cellData: ", cellData);
+
+ var icon = "question";
+ if(ICONS.hasOwnProperty(cellData.value)) {
+ icon = ICONS[cellData.value];
+ }
+ $td.addClass("icon").html(_.template('', {
+ icon: icon,
+ status: cellData.value
+ }));
+ }
+ });
+
+ // Use the BasicRowRenderer class to create a custom table row renderer
+ var CustomRowRenderer = TableView.BaseRowExpansionRenderer.extend({
+ canRender: function(rowData) {
+ console.log("RowData: ", rowData);
+ return true;
+ },
+
+ initialize: function(args){
+ this._searchManager = new SearchManager({
+ id: 'details-search-manager',
+ preview: false
+ });
+ this._TableView = new TableView({
+ id: 'ResultsTable',
+ managerid: 'details-search-manager',
+ drilldown: "all",
+ drilldownRedirect: true
+ });
+ },
+
+ render: function($container, rowData) {
+ // Print the rowData object to the console
+ // console.log("RowData: ", rowData);
+
+ var repoNameCell = _(rowData.cells).find(function (cell) {
+ return cell.field === 'Repository Name';
+ });
+
+
+ var workflowName = _(rowData.cells).find(function (cell) {
+ return cell.field === 'Workflow Name';
+ });
+
+ var workflowIDCell = _(rowData.cells).find(function (cell) {
+ return cell.field === 'Run ID';
+ });
+
+ this._searchManager.set({ search: '`github_webhooks` (workflow_run.id='+workflowIDCell.value+' OR workflow_job.run_id='+workflowIDCell.value+') | eval started=if(action=="requested", _time, null), completed=if(action=="completed", _time,null) | stats latest(workflow_run.conclusion) as Status, earliest(started) as Started, latest(completed) as Completed, latest(workflow_run.head_branch) as Branch, latest(workflow_run.event) as Trigger | eval Duration=tostring(Completed-Started, "Duration") | fields Status, Duration, Branch, Trigger | eval Details="Click here for Workflow Details" | transpose|rename column AS Details| rename "row 1" AS values'});
+ // $container is the jquery object where we can put our content.
+ // In this case we will render our chart and add it to the $container
+ $container.append(this._TableView.render().el);
+ }
+ });
+
+ // Create an instance of the custom cell renderer,
+ // add it to the table, and render the table
+ var myCellRenderer = new CustomCellRenderer();
+ mycustomrowtable.addCellRenderer(myCellRenderer);
+ mycustomrowtable.render();
+
+ // Create an instance of the custom row renderer,
+ // add it to the table, and render the table
+ var myRowRenderer = new CustomRowRenderer();
+ mycustomrowtable.render();
+
+
+});
diff --git a/github_app_for_splunk/default/app.conf b/github_app_for_splunk/default/app.conf
index 022cf47..57e3596 100644
--- a/github_app_for_splunk/default/app.conf
+++ b/github_app_for_splunk/default/app.conf
@@ -7,11 +7,11 @@ version = X.Y.Z
[ui]
is_visible = 1
-label = Github App for Splunk
+label = GitHub App for Splunk
[launcher]
author = Doug Erkkila
-description = Report on Activity and Audit Data from Github
+description = Report on Activity and Audit Data from GitHub
version = X.Y.Z
[package]
diff --git a/github_app_for_splunk/default/data/ui/nav/default.xml b/github_app_for_splunk/default/data/ui/nav/default.xml
index a493681..e952452 100644
--- a/github_app_for_splunk/default/data/ui/nav/default.xml
+++ b/github_app_for_splunk/default/data/ui/nav/default.xml
@@ -1,14 +1,30 @@
\ No newline at end of file
+
+
+
+
+
+
diff --git a/github_app_for_splunk/default/data/ui/views/1_system_health_monitor.xml b/github_app_for_splunk/default/data/ui/views/1_system_health_monitor.xml
index 701d8c3..b8d12c4 100644
--- a/github_app_for_splunk/default/data/ui/views/1_system_health_monitor.xml
+++ b/github_app_for_splunk/default/data/ui/views/1_system_health_monitor.xml
@@ -137,9 +137,8 @@
Forks
- | mstats avg(_value) prestats=true WHERE `github_collectd` AND metric_name="processes.fork_rate.value" AND host="$envTkn$" span=10s BY metric_name
-| eval metric_name=mvindex(split(metric_name,"."),1)
-| timechart avg(_value) as "Avg" span=10sec by metric_name
+ | mstats rate_avg("processes.fork_rate.value") as "Rate (Avg) /s" chart=true WHERE `github_collectd` host="$envTkn$" span=10s
+| fields - _span*$timeTkn.earliest$$timeTkn.latest$
diff --git a/github_app_for_splunk/default/data/ui/views/2_process_monitor.xml b/github_app_for_splunk/default/data/ui/views/2_process_monitor.xml
index 802adb8..a9e8fbe 100644
--- a/github_app_for_splunk/default/data/ui/views/2_process_monitor.xml
+++ b/github_app_for_splunk/default/data/ui/views/2_process_monitor.xml
@@ -55,8 +55,135 @@
+
+
+
+
+
+
+
+
+
+
+
+ CPU (Kernel)
+
+ | mstats rate_avg("processes.ps_cputime.syst") chart=true WHERE `github_collectd` host="$envTkn$" span=10s chart.useother=false chart.agg=max chart.limit=top100 BY plugin_instance
+| fields - _span*
+| rename "_time /s" AS _time
+ -1h
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ CPU (Application)
+
+ | mstats rate_avg("processes.ps_cputime.user") chart=true WHERE `github_collectd` host="$envTkn$" span=10s chart.useother=false chart.agg=max chart.limit=top100 BY plugin_instance
+| fields - _span*
+| rename "_time /s" AS _time
+ -1h
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ I/O Operations (Read IOPS)
+
+ | mstats rate_avg("processes.ps_disk_ops.read") chart=true WHERE `github_collectd` host="$envTkn$" span=10s chart.useother=false chart.agg=max chart.limit=top100 BY plugin_instance
+| fields - _span*
+| rename "_time /s" AS _time
+ -1h
+
+
+
+
+
+
+
+
+
+
+
+
+
+ I/O Operations (Write IOPS)
+
+ | mstats rate_avg("processes.ps_disk_ops.write") chart=true WHERE `github_collectd` host="$envTkn$" span=10s chart.useother=false chart.agg=max chart.limit=top100 BY plugin_instance
+| fields - _span*
+| rename "_time /s" AS _time
+ -1h
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Storage Traffic (Read)
+
+ | mstats rate_avg("processes.ps_storage_octets.read") chart=true WHERE `github_collectd` host="$envTkn$" span=10s chart.useother=false chart.agg=avg chart.limit=top100 BY plugin_instance
+| fields - _span*
+| rename "_time /s" AS _time
+ -1h
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Storage Traffic (Write)
+
+ | mstats rate_avg("processes.ps_storage_octets.write") chart=true WHERE `github_collectd` host="$envTkn$" span=10s chart.useother=false chart.agg=avg chart.limit=top100 BY plugin_instance
+| fields - _span*
+| rename "_time /s" AS _time
+ -1h
+
+
+
+
+
+
+
diff --git a/github_app_for_splunk/default/data/ui/views/3_authentication_monitor.xml b/github_app_for_splunk/default/data/ui/views/3_authentication_monitor.xml
new file mode 100644
index 0000000..f35698d
--- /dev/null
+++ b/github_app_for_splunk/default/data/ui/views/3_authentication_monitor.xml
@@ -0,0 +1,314 @@
+
diff --git a/github_app_for_splunk/default/data/ui/views/8_storage_monitor.xml b/github_app_for_splunk/default/data/ui/views/8_storage_monitor.xml
new file mode 100644
index 0000000..96ecbbb
--- /dev/null
+++ b/github_app_for_splunk/default/data/ui/views/8_storage_monitor.xml
@@ -0,0 +1,128 @@
+
diff --git a/github_app_for_splunk/default/data/ui/views/api_config.xml b/github_app_for_splunk/default/data/ui/views/api_config.xml
deleted file mode 100644
index c101422..0000000
--- a/github_app_for_splunk/default/data/ui/views/api_config.xml
+++ /dev/null
@@ -1,232 +0,0 @@
-
-
-
-
-
-
GitHub Enterprise Audit Log Monitoring
-
This modular input makes an HTTPS request to the GitHub Enterprise's Audit Log REST API endpoint at a definable interval to fetch audit log data.
-
Prerequisites
-
-
Splunk v8+
-
-
Installation
-
-
-
Download the latest release of the Splunk Add-On for Github Enterprise Audit Logs from SplunkBase
-
-
-
Go to Apps > Manage Apps in the toolbar menu.
-
-
-
Use the "Install app from file" button to upload the spl file you downloaded from Splunkbase
-
-
-
Generate a Personal Access Token in GitHub Enterprise with the site_admin scope.
-
-
-
Under Settings > Data inputs, there should be a new option called Github Audit Log Monitoring, click "+ Add new"
-
-
-
Configure the Input by entering the necessary information in the input fields. Don't forget to define the Index for the data to be stored in. This option is under the "More settings" option.
-
-
-
Under Settings > Advanced Search, select Search Macros. You'll need to update the github_source macro to use the Index you assigned above.
-
-
-
Configuration
-
Personal Access Token Scope
-
The following are the required scopes for the personal access token allowing the module to fetch the audit log entries successfully:
-
-
[x] admin:enterprise Full control of enterprises
-
-
[x] manage_billing:enterprise Read and write enterprise billing data
-
-
[x] read:enterprise Read enterprise profile data
-
-
-
-
-
Input Fields
-
-
-
- name
-
-
-
This is name of your instance. You can have multiple modular inputs running simultaneously. However, this is not a recommended behavior for this module.
-
Takes: alpha-numeric, white spaces and symbol characters
-
Example: GHE-enterprise-name
-
-
-
-
-
- Hostname
-
-
-
This is the hostname of your GitHub Enterprise instance. Make sure there are no trailing / in the URL provided. This could either be a FQDN or an IP address. Do not append any paths beyond the tld.
The enterprise name for which to fetch audit log events
-
-
-
-
- Personal Access Token
-
-
-
This is your personal access token that you generate for your or a service account in GitHub Enterprise. This module requires that the personal access token be created with the site_admin scope. This is a very sensitive token so make sure to keep it secure at all times!
-
Security: The personal access token is encrypted and stored in Splunk's password storage. After you configure it the first time it will be replaced in Splunk's UI with a unique identifier. This identifier will be used by the module to fetch the personal access token before making the API request to GitHub Enterprise.
The maximum number of events / entries to fetch each time the script runs. To understand how to calculate the maximum number of entries and interval to best fit your organization go to the Tweaking throughput section below.
-
-
-
-
- Verify Self-Signed Certificates
-
-
-
This is a parameter passed to the get() method in the Requests library. If the checkbox is cheked then the SSL certificate will be verified like a browser does and Requests will throw a SSLError if itâs unable to verify the certificate. Uncheck this box if you are using self-signed certificates.
-
-
-
-
- Debug Mode
-
-
-
The personal access token will be leaked in the splunkd logs. DO NOT ENABLE unless you are ready to update your personal access token.
-
If you are experiencing issues and the module is not operating as intended, you can enable this mode to seethe module's debugging information in the splunkd logs.
-
-
-
-
- Interval
-
-
-
Takes a cron expression as defined in the Splunk docs.
-
Example: 30 * * * *
-
-
At minute 30 of every hour. For example, if you set this CRON job at 11:02, your job will begin running at 11:30, 12:30, 1:30, etc...
-
-
-
Example: */5 * * * *
-
-
Every 5 minutes
-
-
-
Example: 300
-
-
Every 300 seconds or 5 minutes
-
-
-
-
-
-
Tweaking throughput
-
This modular input fetches events by calling the Enterprise Audit Log API. This API returns a maximum of 100 events / entries per page. The pagination algorithm can fetch events up to the maximum entries per run defined. It's important to tweak the maximum entries per run and interval parameters to have the ability to fetch your data in a timely manner and stay as close to real-time as possible.
-
- Example:
-
-
-
Enterprise
-
Events per minute
-
Maximum entries per run
-
Interval
-
API calls used
-
Guidance
-
-
-
Evil-Corp
-
1000
-
1000
-
*/1 * * * *
-
3000 per hour
-
The modular input should be able to handle this with ease.
-
-
-
Poizen-Inc
-
5000
-
5000
-
*/1 * * * *
-
600 per hour
-
We are approaching API rate limit per hour. Depending on latency, 5000 entries = 50 API calls per minute. One minute might not be sufficient to fetch all this data.
-
-
-
Monsters-Inc
-
10000
-
2000
-
*/1 * * * *
-
1200 per hour
-
We are approaching API rate limit per hour. Depending on latency, 5000 entries = 50 API calls per minute. One minute might not be sufficient to fetch all this data.
-
-
-
-
FAQs
-
How is my Personal Access Token secured?
-
On the first run the modular input will identify that your personal access token (PAT) is not encrypted. It will encrypt your PAT and store it in Splunk's credentials manager. It will replace the plaintext PAT with an md5 hash of an identifying key.
-
Your personal access token is only visible in plaintext from the time you configure the modular input instance until the first run.
-
Does the interval field access only cron syntax?
-
No, you can enter the number of seconds instead.
-
I enabled debug mode, what now?
-
If you've enabled debug mode be ready to change your personal access token because it will most likely be leaked into the Splunk logs in plain text.
-
Why can't I use a GitHub app instead of a personal access token?
-
GitHub apps cannot be installed on the enterprise level. The REST API requires enterprise admin privileges which are out of scope for GitHub apps.
-
Troubleshooting
-
Read logs in Splunk
-
You can use this search query to fetch all the logs belonging to this module when Debug Mode is enabled.