diff --git a/.github/actions/appinspect_publish/Dockerfile b/.github/actions/appinspect_publish/Dockerfile new file mode 100644 index 0000000..ded4720 --- /dev/null +++ b/.github/actions/appinspect_publish/Dockerfile @@ -0,0 +1,14 @@ +# Container image that runs your code +FROM python:3-slim AS builder + +# Copies your code file from your action repository to the filesystem path `/` of the container +ADD . /app +WORKDIR /app + +RUN pip install --target=/app requests + +# Code file to execute when the docker container starts up (`publish.py`) +FROM gcr.io/distroless/python3-debian10 +COPY --from=builder /app /app +ENV PYTHONPATH /app +CMD ["/app/publish.py"] \ No newline at end of file diff --git a/.github/actions/appinspect_publish/README.md b/.github/actions/appinspect_publish/README.md new file mode 100644 index 0000000..99a7635 --- /dev/null +++ b/.github/actions/appinspect_publish/README.md @@ -0,0 +1,21 @@ +# AppInspect Publish action + +This action uploads a packaged Splunk app to Splunkbase as a new release. + +## Inputs + +## `APP_ID` + +**Required** The Splunkbase app ID to publish to. Default `'5596'`. + +## Outputs + +None. + +## Example usage + +uses: ./.github/actions/appinspect_publish +with: + APP_ID: '5596' + APP_FILE: './dist/my_package.tar.gz' + SPLUNK_VERSION: '8.0,8.1,8.2' \ No newline at end of file diff --git a/.github/actions/appinspect_publish/action.yml b/.github/actions/appinspect_publish/action.yml new file mode 100644 index 0000000..ea0b2f4 --- /dev/null +++ b/.github/actions/appinspect_publish/action.yml @@ -0,0 +1,30 @@ +name: 'AppInspect Publish' +description: 'Publish a packaged Splunk app to Splunkbase as a new release' +inputs: + APP_ID: + description: 'App ID From Splunkbase' + required: true + default: '5596' + SPLUNK_USERNAME: + description: 'Splunkbase Username' + required: true + SPLUNK_PASSWORD: + description: 'Splunkbase Password' + required: true + APP_FILE: + description: 'The name of the file, for example "my_package.tar.gz".' 
+ required: true + SPLUNK_VERSION: + description: 'The Splunk version(s) that the release is compatible with. For example, "8.0,8.1,8.2".' + required: true + VISIBILITY: + description: 'true = The release is to be visible upon package validation success. false = if the release is to be hidden.' + required: false + default: 'false' + CIM_VERSIONS: + description: 'The CIM version(s) that the release is compatible with. For example, "4.9,4.7".' + required: false + default: '' +runs: + using: 'docker' + image: 'Dockerfile' diff --git a/.github/actions/appinspect_publish/publish.py b/.github/actions/appinspect_publish/publish.py new file mode 100644 index 0000000..8b8adcb --- /dev/null +++ b/.github/actions/appinspect_publish/publish.py @@ -0,0 +1,35 @@ +import os +import requests +from requests.auth import HTTPBasicAuth + +APP_ID= os.environ['INPUT_APP_ID'] +filepath = os.environ['INPUT_APP_FILE'] +SPLUNK_USERNAME = os.environ['INPUT_SPLUNK_USERNAME'] +SPLUNK_PASSWORD = os.environ['INPUT_SPLUNK_PASSWORD'] +SPLUNK_VERSION = os.environ['INPUT_SPLUNK_VERSION'] +VISIBILITY = os.environ['INPUT_VISIBILITY'] +CIM_VERSIONS = os.environ['INPUT_CIM_VERSIONS'] + +api_path = 'https://splunkbase.splunk.com/api/v1/app/{}/new_release'.format(APP_ID) + +auth = HTTPBasicAuth(SPLUNK_USERNAME, SPLUNK_PASSWORD) + +files = { + 'files[]': open(filepath, 'rb'), + 'filename': (None, os.path.basename(filepath)), + 'splunk_versions': (None, SPLUNK_VERSION), + 'visibility': (None, VISIBILITY), + 'cim_versions': (None, CIM_VERSIONS) +} + +response = requests.post(api_path, files=files, auth=auth) + +print(response.status_code) +print(response.text) + +# on a non-200 response, raise an HTTPError and exit with the status code +if response.status_code != 200: + response.raise_for_status() + exit(response.status_code) +else: + exit(0) diff --git a/.github/actions/log_to_splunk/Dockerfile b/.github/actions/log_to_splunk/Dockerfile new file mode 100644 index 0000000..543e192 --- /dev/null +++ 
b/.github/actions/log_to_splunk/Dockerfile @@ -0,0 +1,14 @@ +# Container image that runs your code +FROM python:3-slim AS builder + +# Copies your code file from your action repository to the filesystem path `/` of the container +ADD . /app +WORKDIR /app + +RUN pip install --target=/app requests + +# Code file to execute when the docker container starts up (`entrypoint.sh`) +FROM gcr.io/distroless/python3-debian10 +COPY --from=builder /app /app +ENV PYTHONPATH /app +CMD ["/app/main.py"] diff --git a/.github/actions/log_to_splunk/LICENSE b/.github/actions/log_to_splunk/LICENSE new file mode 100644 index 0000000..4fc208e --- /dev/null +++ b/.github/actions/log_to_splunk/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 Splunk GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/.github/actions/log_to_splunk/README.md b/.github/actions/log_to_splunk/README.md new file mode 100644 index 0000000..3b5e9fb --- /dev/null +++ b/.github/actions/log_to_splunk/README.md @@ -0,0 +1,2 @@ +# POST GitHub Workflow Logs to Splunk HTTP Event Collector +test diff --git a/.github/actions/log_to_splunk/action.yml b/.github/actions/log_to_splunk/action.yml new file mode 100644 index 0000000..67b803d --- /dev/null +++ b/.github/actions/log_to_splunk/action.yml @@ -0,0 +1,28 @@ +# action.yml +name: 'Send Workflow Logs to Splunk' +description: 'Upload GitHub Workflow logs to Splunk HEC' +inputs: + splunk-url: + description: 'Full URL for Splunk HEC endpoint' + required: true + hec-token: + description: 'Splunk HEC Token' + required: true + github-token: + description: 'Github PAT' + required: true + sourcetype: + description: 'Splunk Sourcetype' + default: 'github_workflow_log_action' + source: + description: 'GitHub Workflow name' + default: ${{ github.workflow }} + workflowID: + description: 'The Workflow Run number' + default: ${{ github.run_number}} +outputs: + status: + description: 'value is success/fail based on POST result' +runs: + using: 'docker' + image: 'Dockerfile' diff --git a/.github/actions/log_to_splunk/main.py b/.github/actions/log_to_splunk/main.py new file mode 100644 index 0000000..8c127a6 --- /dev/null +++ b/.github/actions/log_to_splunk/main.py @@ -0,0 +1,146 @@ +import os +import requests +import json +import zipfile +import io +import glob +import re +from datetime import datetime + +def main(): + + GITHUB_REF=os.environ["GITHUB_REF"] + GITHUB_REPOSITORY=os.environ["GITHUB_REPOSITORY"] + GITHUB_RUN_ID=os.environ["GITHUB_RUN_ID"] + GITHUB_API_URL=os.environ["GITHUB_API_URL"] + GITHUB_WORKFLOWID=os.environ["INPUT_WORKFLOWID"] + GITHUB_TOKEN = os.environ.get("INPUT_GITHUB-TOKEN") + + SPLUNK_HEC_URL=os.environ["INPUT_SPLUNK-URL"]+"services/collector/event" + SPLUNK_HEC_TOKEN=os.environ["INPUT_HEC-TOKEN"] + 
SPLUNK_SOURCE=os.environ["INPUT_SOURCE"] + SPLUNK_SOURCETYPE=os.environ["INPUT_SOURCETYPE"] + + batch = count = 0 + eventBatch = "" + headers = {"Authorization": "Splunk "+SPLUNK_HEC_TOKEN} + host=os.uname()[1] + + summary_url = "{url}/repos/{repo}/actions/runs/{run_id}".format(url=GITHUB_API_URL,repo=GITHUB_REPOSITORY,run_id=GITHUB_WORKFLOWID) + + try: + x = requests.get(summary_url, stream=True, auth=('token',GITHUB_TOKEN)) + x.raise_for_status() + except requests.exceptions.HTTPError as errh: + output = "GITHUB API Http Error:" + str(errh) + print(f"Error: {output}") + print(f"::set-output name=result::{output}") + return x.status_code + except requests.exceptions.ConnectionError as errc: + output = "GITHUB API Error Connecting:" + str(errc) + print(f"Error: {output}") + print(f"::set-output name=result::{output}") + return x.status_code + except requests.exceptions.Timeout as errt: + output = "Timeout Error:" + str(errt) + print(f"Error: {output}") + print(f"::set-output name=result::{output}") + return x.status_code + except requests.exceptions.RequestException as err: + output = "GITHUB API Non catched error conecting:" + str(err) + print(f"Error: {output}") + print(f"::set-output name=result::{output}") + return x.status_code + except Exception as e: + print("Internal error", e) + return x.status_code + + summary = x.json() + + summary.pop('repository') + + summary["repository"]=summary["head_repository"]["name"] + summary["repository_full"]=summary["head_repository"]["full_name"] + + summary.pop('head_repository') + + utc_time = datetime.strptime(summary["updated_at"], "%Y-%m-%dT%H:%M:%SZ") + epoch_time = (utc_time - datetime(1970, 1, 1)).total_seconds() + + event={'event':json.dumps(summary),'sourcetype':SPLUNK_SOURCETYPE,'source':'workflow_summary','host':host,'time':epoch_time} + event=json.dumps(event) + + x=requests.post(SPLUNK_HEC_URL, data=event, headers=headers) + + + url = 
"{url}/repos/{repo}/actions/runs/{run_id}/logs".format(url=GITHUB_API_URL,repo=GITHUB_REPOSITORY,run_id=GITHUB_WORKFLOWID) + print(url) + + try: + x = requests.get(url, stream=True, auth=('token',GITHUB_TOKEN)) + + except requests.exceptions.HTTPError as errh: + output = "GITHUB API Http Error:" + str(errh) + print(f"Error: {output}") + print(f"::set-output name=result::{output}") + return + except requests.exceptions.ConnectionError as errc: + output = "GITHUB API Error Connecting:" + str(errc) + print(f"Error: {output}") + print(f"::set-output name=result::{output}") + return + except requests.exceptions.Timeout as errt: + output = "Timeout Error:" + str(errt) + print(f"Error: {output}") + print(f"::set-output name=result::{output}") + return + except requests.exceptions.RequestException as err: + output = "GITHUB API Non catched error conecting:" + str(err) + print(f"Error: {output}") + print(f"::set-output name=result::{output}") + return + + z = zipfile.ZipFile(io.BytesIO(x.content)) + z.extractall('/app') + + timestamp = batch = count = 0 + + for name in glob.glob('/app/*.txt'): + logfile = open(os.path.join(os.path.dirname(os.path.abspath(__file__)), name.replace('./','')),'r') + Lines = logfile.readlines() + for line in Lines: + + if line: + count+=1 + if timestamp: + t2=timestamp + timestamp = re.search("\d{4}-\d{2}-\d{2}T\d+:\d+:\d+.\d+Z",line.strip()) + + if timestamp: + timestamp = re.sub("\dZ","",timestamp.group()) + timestamp = datetime.strptime(timestamp,"%Y-%m-%dT%H:%M:%S.%f") + timestamp = (timestamp - datetime(1970,1,1)).total_seconds() + else: + timestamp=t2 + + x = re.sub("\d{4}-\d{2}-\d{2}T\d+:\d+:\d+.\d+Z","",line.strip()) + x=x.strip() + job_name=re.search("\/\d+\_(?P.*)\.txt",name) + job_name=job_name.group('job') + fields = {'lineNumber':count,'workflowID':GITHUB_WORKFLOWID,'job':job_name} + if x: + batch+=1 + event={'event':x,'sourcetype':SPLUNK_SOURCETYPE,'source':SPLUNK_SOURCE,'host':host,'time':timestamp,'fields':fields} + 
eventBatch=eventBatch+json.dumps(event) + else: + print("skipped line "+str(count)) + + if batch>=1000: + batch=0 + x=requests.post(SPLUNK_HEC_URL, data=eventBatch, headers=headers) + eventBatch="" + + x=requests.post(SPLUNK_HEC_URL, data=eventBatch, headers=headers) + +if __name__ == '__main__': + main() diff --git a/.github/actions/post_logs_to_splunk_hec/Dockerfile b/.github/actions/post_logs_to_splunk_hec/Dockerfile new file mode 100644 index 0000000..63b5160 --- /dev/null +++ b/.github/actions/post_logs_to_splunk_hec/Dockerfile @@ -0,0 +1,8 @@ +# Container image that runs your code +FROM python:3.8-slim-buster + +# Copies your code file from your action repository to the filesystem path `/` of the container +COPY entrypoint.sh /entrypoint.sh + +# Code file to execute when the docker container starts up (`entrypoint.sh`) +ENTRYPOINT ["/entrypoint.sh"] diff --git a/.github/actions/post_logs_to_splunk_hec/README.md b/.github/actions/post_logs_to_splunk_hec/README.md new file mode 100644 index 0000000..3b5e9fb --- /dev/null +++ b/.github/actions/post_logs_to_splunk_hec/README.md @@ -0,0 +1,2 @@ +# POST GitHub Workflow Logs to Splunk HTTP Event Collector +test diff --git a/.github/actions/post_logs_to_splunk_hec/action.yml b/.github/actions/post_logs_to_splunk_hec/action.yml new file mode 100644 index 0000000..131ce24 --- /dev/null +++ b/.github/actions/post_logs_to_splunk_hec/action.yml @@ -0,0 +1,31 @@ +# action.yml +name: 'Post Logs to Splunk HEC' +description: 'Upload GitHub Workflow logs to Splunk HEC' +inputs: + splunk-url: + description: 'Full URL for Splunk HEC endpoint' + required: true + hec-token: + description: 'Splunk HEC Token' + required: true + sourcetype: + description: 'Splunk Sourcetype' + default: 'github_workflow_log_job' + source: + description: 'GitHub Workflow name' + default: ${{ github.workflow }} + workflowID: + description: 'The Workflow Run number' + default: ${{ github.run_id}} +outputs: + status: + description: 'value is success/fail 
based on app inspect result' +runs: + using: 'docker' + image: 'Dockerfile' + args: + - ${{ inputs.splunk-url }} + - ${{ inputs.hec-token }} + - ${{ inputs.sourcetype }} + - ${{ inputs.source }} + - ${{ inputs.workflowID }} diff --git a/.github/actions/post_logs_to_splunk_hec/entrypoint.sh b/.github/actions/post_logs_to_splunk_hec/entrypoint.sh new file mode 100755 index 0000000..e631a1e --- /dev/null +++ b/.github/actions/post_logs_to_splunk_hec/entrypoint.sh @@ -0,0 +1,46 @@ +#!/bin/sh -l + +python3 -m pip install requests +echo " +import os +import requests +import re +from datetime import datetime +import json + +logfile = open(os.path.join(os.path.dirname(os.path.abspath(__file__)), \"file.log\"),'r') +Lines = logfile.readlines() + +batch = count = 0 +url = \"$1services/collector/event\" +token=\"$2\" +headers = {\"Authorization\": \"Splunk \"+token} +sourcetype = \"$3\" +eventBatch = \"\" +workflowID=\"$5\" +source=\"$4\" +host=\"$HOSTNAME\" + +for line in Lines: + count+=1 + timestamp = re.search(\"\d{4}-\d{2}-\d{2}T\d+:\d+:\d+.\d+Z\",line.strip()) + timestamp = re.sub(\"\dZ\",\"\",timestamp.group()) + timestamp = datetime.strptime(timestamp,\"%Y-%m-%dT%H:%M:%S.%f\") + timestamp = (timestamp - datetime(1970,1,1)).total_seconds() + x = re.sub(\"\d{4}-\d{2}-\d{2}T\d+:\d+:\d+.\d+Z\",\"\",line.strip()) + x=x.strip() + fields = {'lineNumber':count,'workflowID':workflowID} + if x: + batch+=1 + event={'event':x,'sourcetype':sourcetype,'source':source,'host':host,'time':timestamp,'fields':fields} + eventBatch=eventBatch+json.dumps(event) + else: + print(\"skipped line \"+str(count)) + + if batch>=1000: + batch=0 + x=requests.post(url, data=eventBatch, headers=headers) + eventBatch=\"\" + +x=requests.post(url, data=eventBatch, headers=headers)" > t.py +python3 t.py diff --git a/.github/workflows/appinspect_api.yml b/.github/workflows/appinspect_api.yml index 99a88cd..2053040 100644 --- a/.github/workflows/appinspect_api.yml +++ b/.github/workflows/appinspect_api.yml 
@@ -45,3 +45,17 @@ jobs: splunkUser: ${{ secrets.SPLUNKBASE_USER }} splunkPassword: ${{ secrets.SPLUNKBASE_PASSWORD }} includedTags: cloud + - name: Release + uses: fnkr/github-action-ghr@v1 + if: startsWith(github.ref, 'refs/tags/') + env: + GHR_PATH: ./dist/github_app_for_splunk.spl + GITHUB_TOKEN: ${{ secrets.API_TOKEN }} + - name: Publish App to Splunkbase + uses: ./.github/actions/appinspect_publish # Uses an action in the root directory + with: + APP_ID: '5596' + APP_FILE: './dist/github_app_for_splunk.spl' + SPLUNK_USERNAME: ${{ secrets.SPLUNKBASE_USER }} + SPLUNK_PASSWORD: ${{ secrets.SPLUNKBASE_PASSWORD }} + SPLUNK_VERSION: '8.0,8.1,8.2,9.0' diff --git a/.github/workflows/appinspect_cli.yml b/.github/workflows/appinspect_cli.yml index 06517ad..4ed053d 100644 --- a/.github/workflows/appinspect_cli.yml +++ b/.github/workflows/appinspect_cli.yml @@ -20,6 +20,14 @@ jobs: steps: - uses: actions/checkout@v2 + - name: Set up Python 3.x + uses: actions/setup-python@v4 + with: + # Semantic version range syntax or exact version of a Python version + python-version: '3.9' + # Optional - x64 or x86 architecture, defaults to x64 + architecture: 'x64' + - name: Install deps uses: CultureHQ/actions-yarn@master with: @@ -35,10 +43,6 @@ jobs: with: args: build - - name: Update Permissions - run: | - chmod +x ./.github/actions/appinspect_cli/entrypoint.sh - - name: Update Version Number run: | old_str="X.Y.Z" @@ -46,7 +50,23 @@ jobs: sed -i "s/$old_str/$new_str/g" package.json sed -i "s/$old_str/$new_str/g" ./github_app_for_splunk/default/app.conf + - name: Install slim + run: | + pip install https://download.splunk.com/misc/packaging-toolkit/splunk-packaging-toolkit-1.0.1.tar.gz + + - name: Create package + run: | + mkdir build + slim package ./github_app_for_splunk + - name: Run App Inspect CLI - uses: ./.github/actions/appinspect_cli + uses: splunk/appinspect-cli-action@v1.5 + with: + app_path: github_app_for_splunk-1.0.0.tar.gz + included_tags: cloud, splunk_appinspect 
+ + - name: Upload package + uses: actions/upload-artifact@v3 with: - app-path: github_app_for_splunk/ + name: github_app_for_splunk-1.0.0.tar.gz + path: ./github_app_for_splunk-1.0.0.tar.gz diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 0000000..6078826 --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,71 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL" + +on: + push: + branches: [ main ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ main ] + schedule: + - cron: '19 20 * * 2' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ 'javascript' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] + # Learn more: + # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v1 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. 
+ # Prefix the list here with "+" to use these queries and those in the config file. + # queries: ./path/to/local/query, your-org/your-repo/queries@main + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v1 + + # â„šī¸ Command-line programs to run using the OS shell. + # 📚 https://git.io/JvXDl + + # âœī¸ If the Autobuild fails above, remove it and uncomment the following three lines + # and modify them (or add more) to build your code if your project + # uses a compiled language + + #- run: | + # make bootstrap + # make release + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v1 diff --git a/.github/workflows/log_to_splunk.yml b/.github/workflows/log_to_splunk.yml new file mode 100644 index 0000000..1a708f1 --- /dev/null +++ b/.github/workflows/log_to_splunk.yml @@ -0,0 +1,33 @@ +name: Send Workflow Logs to Splunk + +# Controls when the action will run. 
+on: + workflow_run: + workflows: ["*"] + types: + - completed + +env: + triggerID: ${{ github.event.workflow_run.id }} + triggerJob: ${{ github.event.workflow_run.name }} + +jobs: + WriteLogs: + runs-on: ubuntu-latest + if: ${{ github.event.workflow_run.name!='WriteLogs'}} + + steps: + - uses: actions/checkout@v2 + + - name: Output Job ID + run: echo ${{ github.event.workflow_run.id }} + + - name: Send Workflow logs to Splunk + if: ${{ always() }} + uses: ./.github/actions/log_to_splunk + with: + splunk-url: ${{ secrets.HEC_URL }} + hec-token: ${{ secrets.HEC_TOKEN }} + github-token: ${{ secrets.API_TOKEN }} + workflowID: ${{ env.triggerID }} + source: ${{ env.triggerJob }} diff --git a/.github/workflows/scorecards-analysis.yml b/.github/workflows/scorecards-analysis.yml new file mode 100644 index 0000000..8b491e4 --- /dev/null +++ b/.github/workflows/scorecards-analysis.yml @@ -0,0 +1,54 @@ +name: Scorecards supply-chain security +on: + # Only the default branch is supported. + branch_protection_rule: + schedule: + - cron: '19 20 * * 2' + + +# Declare default permissions as read only. +permissions: read-all + +jobs: + analysis: + name: Scorecards analysis + runs-on: ubuntu-latest + permissions: + # Needed to upload the results to code-scanning dashboard. + security-events: write + actions: read + contents: read + + steps: + - name: "Checkout code" + uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # v2.4.0 + with: + persist-credentials: false + + - name: "Run analysis" + uses: ossf/scorecard-action@c8416b0b2bf627c349ca92fc8e3de51a64b005cf # v1.0.2 + with: + results_file: results.sarif + results_format: sarif + # Read-only PAT token. To create it, + # follow the steps in https://github.com/ossf/scorecard-action#pat-token-creation. + repo_token: ${{ secrets.SCORECARD_READ_TOKEN }} + # Publish the results to enable scorecard badges. For more details, see + # https://github.com/ossf/scorecard-action#publishing-results. 
+ # For private repositories, `publish_results` will automatically be set to `false`, + # regardless of the value entered here. + publish_results: true + + # Upload the results as artifacts (optional). + - name: "Upload artifact" + uses: actions/upload-artifact@82c141cc518b40d92cc801eee768e7aafc9c2fa2 # v2.3.1 + with: + name: SARIF file + path: results.sarif + retention-days: 5 + + # Upload the results to GitHub's code scanning dashboard. + - name: "Upload to code-scanning" + uses: github/codeql-action/upload-sarif@5f532563584d71fdef14ee64d17bafb34f751ce5 # v1.0.26 + with: + sarif_file: results.sarif diff --git a/.gitignore b/.gitignore index 2d3f6b3..11a4e9c 100644 --- a/.gitignore +++ b/.gitignore @@ -114,7 +114,6 @@ out # Nuxt.js build / generate output .nuxt -dist # Gatsby files .cache/ diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..4fc208e --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 Splunk GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md index a9d6f50..07b4ac2 100644 --- a/README.md +++ b/README.md @@ -1,42 +1,44 @@ -# Github App for Splunk +# GitHub App for Splunk -The Github App for Splunk is a collection of out of the box dashboards and Splunk knowledge objects designed to give Github Admins and platform owners immediate visibility into Github. +The GitHub App for Splunk is a collection of out of the box dashboards and Splunk knowledge objects designed to give GitHub Admins and platform owners immediate visibility into GitHub. -This App is designed to work across multiple Github data sources however not all all required. You may choose to only collect a certain set of data and the parts of this app that utilize that set will function, while those that use other data sources will not function correctly, so please only use the Dashboards that relate to the data you are collecting. +This App is designed to work across multiple GitHub data sources however not all all required. You may choose to only collect a certain set of data and the parts of this app that utilize that set will function, while those that use other data sources will not function correctly, so please only use the Dashboards that relate to the data you are collecting. -The Github App for Splunk is designed to work with the following data sources: +The GitHub App for Splunk is designed to work with the following data sources: -* [Github Audit Log Monitoring Add-On For Splunk](./docs/ghe_audit_logs.md): Audit logs from Github Enterprise Cloud. -* [Github.com Webhooks](./docs/github_webhooks.md): A select set of webhook events like Push, PullRequest, and Repo. 
-* [Github Enterprise Server Syslog Forwarder](https://docs.github.com/en/enterprise-server/admin/user-management/monitoring-activity-in-your-enterprise/log-forwarding): Audit and Application logs from Github Enterprise Server. -* [Github Enterprise Collectd monitoring](./docs/splunk_collectd_forwarding_for_ghes.md): Performance and Infrastructure metrics from Github Enterprise Server. +* [GitHub Audit Log Collection](./docs/ghe_audit_logs.MD): Audit logs from GitHub Enterprise Cloud and Server. +* [Github.com Webhooks](./docs/github_webhooks.MD): A select set of webhook events like Push, PullRequest, Code Scanning and Repo. +* [Github Enterprise Collectd monitoring](./docs/splunk_collectd_forwarding_for_ghes.MD): Performance and Infrastructure metrics from Github Enterprise Server. ## Dashboard Instructions ### Installation -The Github App for Splunk is available for download from [Splunkbase](https://splunkbase.splunk.com/app/5596/). For Splunk Cloud, refer to [Install apps in your Splunk Cloud deployment](https://docs.splunk.com/Documentation/SplunkCloud/latest/Admin/SelfServiceAppInstall). For non-Splunk Cloud deployments, refer to the standard methods for Splunk Add-on installs as documented for a [Single Server Install](http://docs.splunk.com/Documentation/AddOns/latest/Overview/Singleserverinstall) or a [Distributed Environment Install](http://docs.splunk.com/Documentation/AddOns/latest/Overview/Distributedinstall). +The GitHub App for Splunk is available for download from [Splunkbase](https://splunkbase.splunk.com/app/5596/). For Splunk Cloud, refer to [Install apps in your Splunk Cloud deployment](https://docs.splunk.com/Documentation/SplunkCloud/latest/Admin/SelfServiceAppInstall). 
For non-Splunk Cloud deployments, refer to the standard methods for Splunk Add-on installs as documented for a [Single Server Install](http://docs.splunk.com/Documentation/AddOns/latest/Overview/Singleserverinstall) or a [Distributed Environment Install](http://docs.splunk.com/Documentation/AddOns/latest/Overview/Distributedinstall). **This app should be installed on both your search head tier as well as your indexer tier.** - + ### Configuration ![Settings>Advanced Search>Search macros](./docs/images/macros.png) -1. The Github App for Splunk uses macros so that index and `sourcetype` names don't need to be updated in each dashboard panel. You'll need to update the macros to account for your selected indexes. -1. The macro `github_source` is the macro for all audit log events, whether from Github Enterprise Cloud or Server. The predefined macro includes examples of **BOTH**. Update to account for your specific needs. +1. The GitHub App for Splunk uses macros so that index and `sourcetype` names don't need to be updated in each dashboard panel. You'll need to update the macros to account for your selected indexes. +1. The macro `github_source` is the macro for all audit log events, whether from GitHub Enterprise Cloud or Server. The predefined macro includes examples of **BOTH**. Update to account for your specific needs. 1. The macro `github_webhooks` is the macro used for all webhook events. Since it is assuming a single index for all webhook events, that is the predefined example, but update as needed. -1. Finally, the macro `github_collectd` is the macro used for all `collectd` metrics sent from Github Enterprise Server. Please update accordingly. +1. Finally, the macro `github_collectd` is the macro used for all `collectd` metrics sent from GitHub Enterprise Server. Please update accordingly. 
### Integration Overview dashboard -There is an *Integration Overview* dashboard listed under *Dashboards* that allows you to monitor API rate limits, audit events fetched, or webhooks received. This dashboard is primarily meant to be used with the `Github Audit Log Monitoring Add-On for Splunk` and uses internal Splunk logs. To be able to view them you will probably need elevated privileges in Splunk that include access to the `_internal` index. Please coordinate with your Splunk team if that dashboard is desired. +There is an *Integration Overview* dashboard listed under *Dashboards* that allows you to monitor API rate limits, audit events fetched, or webhooks received. This dashboard is primarily meant to be used with the `GitHub Audit Log Monitoring Add-On for Splunk` and uses internal Splunk logs. To be able to view them you will probably need elevated privileges in Splunk that include access to the `_internal` index. Please coordinate with your Splunk team if that dashboard is desired. ### Examples
Expand for screenshots +#### Code Scanning Alerts + ![Code Scanning Dashboard](./docs/images/code_scanning_dashboard.png) + #### Audit Log Dashboard ![Audit Log Dashboard](./docs/images/9F8E9A89-1203-4C0A-B227-C2FD1E17C8B0.jpg) @@ -59,4 +61,4 @@ There is an *Integration Overview* dashboard listed under *Dashboards* that allo ## Support -Support for Github App for Splunk is run through [Github Issues](https://github.com/splunk/github_app_for_splunk/issues). Please open a new issue for any support issues or for feature requests. You may also open a Pull Request if you'd like to contribute additional dashboards, eventtypes for webhooks, or enhancements you may have. +Support for GitHub App for Splunk is run through [GitHub Issues](https://github.com/splunk/github_app_for_splunk/issues). Please open a new issue for any support issues or for feature requests. You may also open a Pull Request if you'd like to contribute additional dashboards, eventtypes for webhooks, or enhancements you may have. diff --git a/docs/ghe_audit_logs.MD b/docs/ghe_audit_logs.MD index 60c6a46..34b0819 100644 --- a/docs/ghe_audit_logs.MD +++ b/docs/ghe_audit_logs.MD @@ -1,46 +1,13 @@ # GitHub Enterprise Audit Log Monitoring -> Splunk modular input plugin to fetch the enterprise audit log from GitHub Enterprise - -Support for modular inputs in Splunk Enterprise 5.0 and later enables you to add new types of inputs to Splunk Enterprise that are treated as native Splunk Enterprise inputs. - -This modular input makes an HTTPS request to the GitHub Enterprise's Audit Log REST API endpoint at a definable interval to fetch audit log data. - -![Splunk modular input demo](./images/C70F5295-D9FA-48FC-90CA-A7BD397AEC35.png) - ## Prerequisites -- Splunk v7.3.5+ -- Python 2.7+ - - Successfully tested with python 3.x but support is not guaranteed yet +- Splunk v8.x+ +- Python 3.x ## Installation -1. SSH to your Splunk server - -2. 
Download the latest release from [Releases](https://github.com/splunk/github-audit-log-monitoring-add-on-for-splunk/releases) - -3. Copy the tarball to the apps directory and extract it: - - ```sh - $ cp splunk-ghe-audit-log-monitoring-.tar.gz $SPLUNK_HOME/etc/apps/ - - $ mkdir -p $SPLUNK_HOME/etc/apps/ghe_audit_log_monitoring - - $ tar xf $SPLUNK_HOME/etc/apps/splunk-ghe-audit-log-monitoring-.tar.gz -C $SPLUNK_HOME/etc/apps/ghe_audit_log_monitoring --strip-components=1 - - # Optional depending on the user executing the previous actions - $ sudo chown -R splunk:splunk $SPLUNK_HOME/etc/apps/ghe_audit_log_monitoring - - # Make the state directory writable by the group - $ sudo chmod -R 775 /opt/splunk/etc/apps/ghe_audit_log_monitoring/state - ``` - -4. Restart the Splunk server - -5. Generate a Personal Access Token in GitHub Enterprise with the `site_admin` scope. - -6. Configure and the GitHub Enterprise Audit Log Monitoring by entering the necessary information in the input fields +Installation and configuration documents for the [Splunk Add-on for GitHub](https://docs.splunk.com/Documentation/AddOns/released/GitHub/About) is available in our official Splunk docs. This add-on can be used for both GitHub Enterprise Cloud and Server. To configure for each specific environment, please refer to the official docs. ## Configuration @@ -52,6 +19,8 @@ The following are the required scopes for the personal access token allowing the - [x] manage_billing:enterprise `Read and write enterprise billing data` - [x] read:enterprise `Read enterprise profile data` +** The PAT must be generated by someone that is an Enterprise Owner** + ### Input Fields ![Modular input configuration view](./images/79E9DCE3-B358-4BAC-9667-7866C2CE4D00.png) @@ -64,9 +33,12 @@ The following are the required scopes for the personal access token allowing the - **Hostname** - - This is the hostname of your GitHub Enterprise instance. Make sure there are no trailing `/` in the URL provided. 
This could either be a FQDN or an IP address. Do not append any paths beyond the tld. + - This is the hostname of your GitHub Enterprise instance. Make sure there are no trailing `/` in the URL provided. This could either be a FQDN or an IP address. Do not append any paths beyond the tld. **Most Users Will Not Need to change this!** - Example: [https://api.github.com](https://api.github.com) +- **Account Type** + - This is the type of GitHub account you are using. GitHub Enterprise Cloud users should keep it at `enterprise`, however some users that only have an enterprise tier paid Organization should change it to `organization`. If you can't tell which you have, go to your user icon in GitHub in the upper right corner. If you have an entry listed as "Your enterprises", then you should use `enterprise`, otherwise use `organization`. + - **Enterprise** - The enterprise name for which to fetch audit log events @@ -124,7 +96,7 @@ This modular input fetches events by calling the [Enterprise Audit Log API](http ### Activity dashboard example -Along with this modular input we're providing a [Github App for Splunk](https://github.com/splunk/github_app_for_splunk) that makes use of the collected audit log events to give you an overview of the activities across your enterprise. +Along with this modular input we're providing a [GitHub App for Splunk](https://github.com/splunk/github_app_for_splunk) that makes use of the collected audit log events to give you an overview of the activities across your enterprise. You can install it via the [Manage Apps page](https://docs.splunk.com/Documentation/Splunk/8.2.0/Admin/Deployappsandadd-ons). @@ -151,30 +123,3 @@ If you've enabled debug mode be ready to change your personal access token becau ### Why can't I use a GitHub app instead of a personal access token? GitHub apps cannot be installed on the enterprise level. The REST API requires enterprise admin privileges which are out of scope for GitHub apps. 
- -## Troubleshooting - -### Read logs in Splunk - -You can use this search query to fetch all the logs belonging to this module when **Debug Mode** is enabled. - -```sh -index="_internal" source="/opt/splunk/var/log/splunk/splunkd.log" ghe_audit_log_monitoring -``` - -### Test the modular input for syntax problems - -Run this test if you don't see anything in the logs (which is a highly unlikely scenario). This will display any syntax errors if there are any. - -```sh -sudo $SPLUNK_HOME/bin/splunk cmd python $SPLUNK_HOME/etc/apps/ghe_audit_log_monitoring/bin/ghe_audit_log_monitoring.py -``` - -### Where are state files stored? - -State files for enterprises are stored in this directory: - -```sh -$SPLUNK_HOME/etc/apps/ghe_audit_log_monitoring/state/ -``` -Test diff --git a/docs/ghes_syslog_setup.MD b/docs/ghes_syslog_setup.MD new file mode 100644 index 0000000..5207419 --- /dev/null +++ b/docs/ghes_syslog_setup.MD @@ -0,0 +1,3 @@ +# Sending GitHub Enterprise Server Logs to Splunk + +GitHub Enterprise Server comes with syslog-ng built in to send data to platforms like Splunk and we can take advantage of that with the [Splunk Add-on for GitHub](https://splunkbase.splunk.com/app/6254/). Setup details and documentation is available on [Splunk Docs](https://docs.splunk.com/Documentation/AddOns/released/GitHub/About). diff --git a/docs/github_webhooks.MD b/docs/github_webhooks.MD index 12ea7d1..cd21373 100644 --- a/docs/github_webhooks.MD +++ b/docs/github_webhooks.MD @@ -1,10 +1,10 @@ -# Using Github Webhooks +# Using GitHub Webhooks -Github Webhooks are a great way to collect rich information as it occurs. You can easily enable webhooks within the Github UI and can even select specific actions on which to trigger a webhook call to Splunk. This is only available at the Organization level and will require this to be done for each Org as desired. To do so, you'll need to configure Splunk as a receiver and then setup the webhooks within Github. 
+GitHub Webhooks are a great way to collect rich information as it occurs. You can easily enable webhooks within the GitHub UI and can even select specific actions on which to trigger a webhook call to Splunk. This is only available at the Organization level and will require this to be done for each Org as desired. To do so, you'll need to configure Splunk as a receiver and then setup the webhooks within GitHub. ## Configuring Splunk to receive Webhooks -Splunk's HTTP Event Collector (HEC) is a quick and easy endpoint built to receive data from other producers like Github. +Splunk's HTTP Event Collector (HEC) is a quick and easy endpoint built to receive data from other producers like GitHub. ### Setting Up Splunk to Listen for Webhooks 1. Under Settings > Data Inputs, click **HTTP Event Collector** @@ -13,19 +13,19 @@ Splunk's HTTP Event Collector (HEC) is a quick and easy endpoint built to receiv 1. Unless required by your Splunk administrator, the rest of this page can be left as is and continue onto the next step. 1. You'll want to click `select` for Source Type, and a new selection box will appear below that. 1. Under the Application option, there should be an entry for `github_json`, however you may need to use the little search bar to find it. -1. For App Context, you'll want to select **Splunk App for Github** +1. For App Context, you'll want to select **Splunk App for GitHub** 1. Next select the index created for this data. If none exist, create a new Index. Names like `github` or the like are recommended, depending on corporate naming conventions. 1. Lastly, click the Review button and confirm the data is correct and hit Submit. Your token is now available to collect data, however we'll need to enable that token to allow Query String Authentication using that token. For this, you'll need command line access to your Splunk environment or be using a deployment server to deploy apps to Splunk. 
-To enable Query String Authentication, you'll need to update the `inputs.conf` file within the Splunk App for Github local directory. In that file, there will be a stanza with the name and value of the token you created. At the end of that stanza, you'll need to add `allowQueryStringAuth = true` and then restart Splunk. This is best done with the help of your Splunk team, so please reach out to them for assistance on this step. +To enable Query String Authentication, you'll need to update the `inputs.conf` file within the Splunk App for GitHub local directory. In that file, there will be a stanza with the name and value of the token you created. At the end of that stanza, you'll need to add `allowQueryStringAuth = true` and then restart Splunk. This is best done with the help of your Splunk team, so please reach out to them for assistance on this step. -### Setting Up Github Webhooks +### Setting Up GitHub Webhooks Webhooks are a simple push mechanism that will send an event each time the webhook is triggered. Unfortunately, Webhooks are unique to each Organization and will need to be setup for each Org as desired. To do this, a user will need to be an Admin for the Org. -1. In your Github Organization Settings page, select Webhooks from the menu on the left. +1. In your GitHub Organization Settings page, select Webhooks from the menu on the left. 1. On this page, you'll see all the existing Webhooks, click the **Add webhook** button to add one to send data to Splunk. 1. The Payload URL will be the Splunk HTTP Event Collector endpoint that was enabled above. It should look something like: `https://YOUR SPLUNK URL:8088/services/collector/raw?token=THE TOKEN FROM ABOVE`. The default port of 8088 may be different for your Splunk Environment, so please confirm the HEC port with your Splunk Admin team. 1. For Content Type, you'll want to select `application/json` as the best option. 
@@ -41,27 +41,42 @@ Once that is complete and webhooks are triggering, you'll want to update the mac - + - + - + - + - + + + + + + + + + + + + + + + +
Splunk EventtypeGithub Webhook EventGitHub Webhook Event Description
Github::RepoGitHub::Repo Repositories Repository created, deleted, archived, unarchived, publicized, privatized, edited, renamed, or transferred.
Github::PushGitHub::Push Pushes Git push to a repository.
Github::PullRequestGitHub::PullRequest Pull requests Pull request opened, closed, reopened, edited, assigned, unassigned, review requested, review request removed, labeled, unlabeled, synchronized, ready for review, converted to draft, locked, unlocked, auto merge enabled, auto merge disabled, milestoned, or demilestoned.
Github::PullRequest::ReviewGitHub::PullRequest::Review Pull request reviews Pull request review submitted, edited, or dismissed.
GitHub::CodeScanningCode scanning alertsAlerts identified by CodeQL and other 3rd party/OSS scanning tools.
GitHub::VulnerabilityAlertRepository vulnerability alertsDependabot alert (aka dependency vulnerability alert) created, resolved, or dismissed on a repository.
GitHub::SecretScanningSecret scanning alertsSecrets scanning alert created, resolved, or reopened.
diff --git a/docs/images/79E9DCE3-B358-4BAC-9667-7866C2CE4D00.png b/docs/images/79E9DCE3-B358-4BAC-9667-7866C2CE4D00.png index d9933d9..188b3e3 100644 Binary files a/docs/images/79E9DCE3-B358-4BAC-9667-7866C2CE4D00.png and b/docs/images/79E9DCE3-B358-4BAC-9667-7866C2CE4D00.png differ diff --git a/docs/images/code_scanning_dashboard.png b/docs/images/code_scanning_dashboard.png new file mode 100644 index 0000000..92135b9 Binary files /dev/null and b/docs/images/code_scanning_dashboard.png differ diff --git a/docs/images/screenshots/BETA_GHES_Alerts.png b/docs/images/screenshots/BETA_GHES_Alerts.png new file mode 100644 index 0000000..301d582 Binary files /dev/null and b/docs/images/screenshots/BETA_GHES_Alerts.png differ diff --git a/docs/images/screenshots/BETA_authentication_dashboard.png b/docs/images/screenshots/BETA_authentication_dashboard.png new file mode 100644 index 0000000..4113111 Binary files /dev/null and b/docs/images/screenshots/BETA_authentication_dashboard.png differ diff --git a/docs/images/screenshots/BETA_security_alert_dashboard.png b/docs/images/screenshots/BETA_security_alert_dashboard.png new file mode 100644 index 0000000..5c74c6a Binary files /dev/null and b/docs/images/screenshots/BETA_security_alert_dashboard.png differ diff --git a/docs/images/screenshots/BETA_value_stream_dashboard.png b/docs/images/screenshots/BETA_value_stream_dashboard.png new file mode 100644 index 0000000..fe4a464 Binary files /dev/null and b/docs/images/screenshots/BETA_value_stream_dashboard.png differ diff --git a/docs/images/screenshots/BETA_workflow_analytics_dashboard.png b/docs/images/screenshots/BETA_workflow_analytics_dashboard.png new file mode 100644 index 0000000..f8e3ac8 Binary files /dev/null and b/docs/images/screenshots/BETA_workflow_analytics_dashboard.png differ diff --git a/docs/images/screenshots/process_monitor_dashboard.png b/docs/images/screenshots/process_monitor_dashboard.png new file mode 100644 index 0000000..51532c9 Binary files 
/dev/null and b/docs/images/screenshots/process_monitor_dashboard.png differ diff --git a/docs/images/screenshots/system_health_dashboard.png b/docs/images/screenshots/system_health_dashboard.png new file mode 100644 index 0000000..e224a86 Binary files /dev/null and b/docs/images/screenshots/system_health_dashboard.png differ diff --git a/docs/splunk_collectd_forwarding_for_ghes.MD b/docs/splunk_collectd_forwarding_for_ghes.MD index 0273458..d491b05 100644 --- a/docs/splunk_collectd_forwarding_for_ghes.MD +++ b/docs/splunk_collectd_forwarding_for_ghes.MD @@ -1,4 +1,4 @@ -# Splunk Collectd Forwarding for Github Enterprise Server +# Splunk Collectd Forwarding for GitHub Enterprise Server This guide describes how to enable collectd forwarding on GitHub Enterprise Server (GHES) using Splunk Enterprise (v8.0+). diff --git a/docs/value_stream_stages.MD b/docs/value_stream_stages.MD new file mode 100644 index 0000000..2f1f3f7 --- /dev/null +++ b/docs/value_stream_stages.MD @@ -0,0 +1,90 @@ +# Value Stream Mapping for DevSecOps + +To better understand the flow of work from ideation to release, we can define a map of the steps that work must go through. + +1. **Ideation** - Creating the initial idea for the work +1. **Acknowledgement** - Accepting the work and planning for it +1. **Working** - Working on the tasks +1. **Merge** - Completing the work +1. **Review** - Reviewing the completed work +1. **Testing** - Testing the work and preparing for release +1. **Pending** - Waiting for release +1. **Released** - Work is released + +All work should fit into one of these categories and should reference an id for the Issue/Card/Task. + +## Stage Definitions + +### Ideation + +This is the initial request for work, typically a task of some kind in a project management system. 
+ +These are the data types that would define this phase: +- Issue creation +- Project Card creation + +### Acknowledgement + +This is the phase where an idea has gone from the backlog, to a planning phase in a project management system. + +These are the data types that would define this phase: +- Issue labeled as "to do" +- Issue assigned a sprint/milestone +- Project Card moved to a "to do" column + +### Working + +This is the phase where the idea becomes reality and is typically when code is being written. + +These are the data types that would define this phase: +- Issue labeled as "in progress" +- Project Card moved to an "in progress" column +- Branch created +- First Push to main branch + +### Merge + +This is the phase where work on the task has been completed. + +These are the data types that would define this phase: +- Issue labeled as "Done" +- Project Card moved to a "Done" column +- Pull Request created +- Last Push to main branch + +### Review + +This is the phase where a code change would be under review. The change would be complete but not ready for testing or release. This may or may not include CI testing or manual code reviews. + +These are the data types that would define this phase: +- Issue labeled as "under review" +- Project Card moved to a "under review" column +- Pipeline/Workflow execution on a Pull Request begins +- Pull Request created + +### Testing + +This is the phase where code is undergoing automated testing through a CI process. This is distinct from the Review phase as it should be part of the release testing, not merge testing. This data is typically found within the CI tools being used. + +These are the data types that would define this phase: +- Pipeline execution starts/ends + +### Pending + +This is the phase where code has been created, reviewed, tested, and approved and is typically measured more by the separation of events than any specific data point. 
+ +These are the data types that would define this phase: +- Pipeline execution complete +- Deployment started + +### Released + +This is the phase where the task is complete and is being used. + +These are the data types that would define this phase: +- Issue closed +- Project Card moved to "released" column +- Kubernetes deployment +- Artifact uploaded +- Container pushed to registry +- Other customer specific markers diff --git a/github_app_for_splunk/README.md b/github_app_for_splunk/README.md index e36796a..d6c1918 100644 --- a/github_app_for_splunk/README.md +++ b/github_app_for_splunk/README.md @@ -1,30 +1,30 @@ -# Github App for Splunk +# GitHub App for Splunk -The Github App for Splunk is a collection of out of the box dashboards and Splunk knowledge objects designed to give Github Admins and platform owners immediate visibility into Github. +The GitHub App for Splunk is a collection of out of the box dashboards and Splunk knowledge objects designed to give GitHub Admins, platform owners, and Security Engineers immediate visibility into GitHub. -This App is designed to work across multiple Github data sources however not all all required. You may choose to only collect a certain set of data and the parts of this app that utilize that set will function, while those that use other data sources will not function correctly, so please only use the Dashboards that relate to the data you are collecting. +This App is designed to work across multiple GitHub data sources however not all all required. You may choose to only collect a certain set of data and the parts of this app that utilize that set will function, while those that use other data sources will not function correctly, so please only use the Dashboards that relate to the data you are collecting. 
-The Github App for Splunk is designed to work with the following data sources: +The GitHub App for Splunk is designed to work with the following data sources: -* [Github Audit Log Monitoring Add-On For Splunk](./docs/ghe_audit_logs.MD): Audit logs from Github Enterprise Cloud. -* [Github.com Webhooks]((./docs/github_webhooks.MD)): A select set of webhook events like Push, PullRequest, and Repo. -* [Github Enterprise Server Syslog Forwarder](https://docs.github.com/en/enterprise-server@3.0/admin/user-management/monitoring-activity-in-your-enterprise/log-forwarding): Audit and Application logs from Github Enterprise Server. -* [Github Enterprise Collectd monitoring](./docs/splunk_collectd_forwarding_for_ghes.MD): Performance and Infrastructure metrics from Github Enterprise Server. +* [GitHub Audit Log Monitoring Add-On For Splunk](./docs/ghe_audit_logs.MD): Audit logs from GitHub Enterprise Cloud. +* [Github.com Webhooks]((./docs/github_webhooks.MD)): A select set of webhook events like Push, PullRequest, Repo, and Code Scanning alerts. +* [GitHub Enterprise Server Syslog Forwarder](https://docs.github.com/en/enterprise-server@3.0/admin/user-management/monitoring-activity-in-your-enterprise/log-forwarding): Audit and Application logs from GitHub Enterprise Server. +* [GitHub Enterprise Collectd monitoring](./docs/splunk_collectd_forwarding_for_ghes.MD): Performance and Infrastructure metrics from GitHub Enterprise Server. ## Dashboard Instructions -The Github App for Splunk is available for download from [Splunkbase](https://splunkbase.splunk.com/app/5596/). Once installed there are a couple steps needed to light up all the dashboards. +The GitHub App for Splunk is available for download from [Splunkbase](https://splunkbase.splunk.com/app/5596/). Once installed there are a couple steps needed to light up all the dashboards. ![Settings>Advanced Search>Search macros](./docs/images/macros.png) -1. 
The Github App for Splunk uses macros so that index and sourcetype names don't need to be updated in each dashboard panel. You'll need to update the macros to account for your selected indexes. -1. The macro `github_source` is the macro for all audit log events, whether from Github Enterprise Cloud or Server. The predefined maco includes examples of **BOTH**. Update to account for your specific needs. +1. The GitHub App for Splunk uses macros so that index and sourcetype names don't need to be updated in each dashboard panel. You'll need to update the macros to account for your selected indexes. +1. The macro `github_source` is the macro for all audit log events, whether from GitHub Enterprise Cloud or Server. The predefined maco includes examples of **BOTH**. Update to account for your specific needs. 1. The macro `github_webhooks` is the macro used for all webhook events. Since it is assuming a single index for all webhook events, that is the predefined example, but update as needed. -1. Finally, the macro `github_collectd` is the macro used for all collectd metrics sent from Github Enterprise Server. Please update accordingly. +1. Finally, the macro `github_collectd` is the macro used for all collectd metrics sent from GitHub Enterprise Server. Please update accordingly. ### Integration Overview dashboard -There is an *Integration Overview* dashboard listed under *Dashboards* that allows you to monitor API rate limits, audit events fetched, or webhooks received. This dashboard is primarily meant to be used with the `Github Audit Log Monitoring Add-On for Splunk` and uses internal Splunk logs. To be able to view them you will probably need elevated privileges in Splunk that include access to the `_internal` index. Please coordinate with your Splunk team if that dashboard is desired. +There is an *Integration Overview* dashboard listed under *Dashboards* that allows you to monitor API rate limits, audit events fetched, or webhooks received. 
This dashboard is primarily meant to be used with the `GitHub Audit Log Monitoring Add-On for Splunk` and uses internal Splunk logs. To be able to view them you will probably need elevated privileges in Splunk that include access to the `_internal` index. Please coordinate with your Splunk team if that dashboard is desired. ## Support -Support for Github App for Splunk is run through [Github Issues](https://github.com/splunk/github_app_for_splunk/issues). Please open a new issue for any support issues or for feature requests. You may also open a Pull Request if you'd like to contribute additional dashboards, eventtypes for webhooks, or enhancements you may have. +Support for GitHub App for Splunk is run through [GitHub Issues](https://github.com/splunk/github_app_for_splunk/issues). Please open a new issue for any support issues or for feature requests. You may also open a Pull Request if you'd like to contribute additional dashboards, eventtypes for webhooks, or enhancements you may have. diff --git a/github_app_for_splunk/appserver/static/custom.css b/github_app_for_splunk/appserver/static/custom.css new file mode 100644 index 0000000..697e6d4 --- /dev/null +++ b/github_app_for_splunk/appserver/static/custom.css @@ -0,0 +1,24 @@ +/* custom.css */ + +/* Define icon styles */ + +td.icon { + text-align: center; +} + +td.icon i { + font-size: 30px; + text-shadow: 1px 1px #aaa; +} + +td.icon .failure { + color: red; +} + +td.icon .in_progress { + color: yellow; +} + +td.icon .success { + color: green; +} diff --git a/github_app_for_splunk/appserver/static/example_customtables.js b/github_app_for_splunk/appserver/static/example_customtables.js new file mode 100644 index 0000000..4ad3951 --- /dev/null +++ b/github_app_for_splunk/appserver/static/example_customtables.js @@ -0,0 +1,132 @@ +require([ + "underscore", + "splunkjs/mvc", + "splunkjs/mvc/searchmanager", + "splunkjs/mvc/tableview", + "splunkjs/mvc/simplexml/ready!" 
+], function( + _, + mvc, + SearchManager, + TableView +) { + + mvc.Components.revokeInstance("myCustomRowSearch"); + + // Set up search managers + var myCustomRowSearch = new SearchManager({ + id: "myCustomRowSearch", + preview: true, + cache: true, + search: "`github_webhooks` \"workflow_run.name\"=\"*\" | spath \"repository.full_name\" | search repository.full_name=* | eval started=if(action=\"requested\",_time,NULL), completed=if(action=\"completed\",_time, NULL), created=round(strptime('workflow_run.created_at',\"%Y-%m-%dT%H:%M:%SZ\")) | stats latest(created) as created, latest(started) as started, latest(completed) as completed, latest(duration) as duration, latest(workflow_run.conclusion) as workflow_run.conclusion by repository.full_name,workflow_run.name,workflow_run.id | eval started=if(isnull(started), created, started) | eval duration=if(isnotnull(completed),tostring(completed-started,\"Duration\"),\"In Progress\") | rename workflow_run.conclusion as status, repository.full_name as \"Repository Name\", workflow_run.name as \"Workflow Name\", workflow_run.id as \"Run ID\" | table status, \"Repository Name\", \"Workflow Name\", \"Run ID\", duration,completed|sort completed|fields - completed", + earliest_time: mvc.tokenSafe("$timeTkn.earliest$"), + latest_time: mvc.tokenSafe("$timeTkn.latest$") + }); + + // Create a table for a custom row expander + var mycustomrowtable = new TableView({ + id: "table-customrow", + managerid: "myCustomRowSearch", + drilldown: "none", + drilldownRedirect: false, + el: $("#table-customrow") + }); + + // Define icons for the custom table cell + var ICONS = { + failure: "error", + in_progress: "question-circle", + success: "check-circle" + }; + + // Use the BaseCellRenderer class to create a custom table cell renderer + var CustomCellRenderer = TableView.BaseCellRenderer.extend({ + canRender: function(cellData) { + // This method returns "true" for the "range" field + return cellData.field === "status"; + }, + + // This render 
function only works when canRender returns "true" + render: function($td, cellData) { + console.log("cellData: ", cellData); + + var icon = "question"; + if(ICONS.hasOwnProperty(cellData.value)) { + icon = ICONS[cellData.value]; + } + $td.addClass("icon").html(_.template('', { + icon: icon, + status: cellData.value + })); + } + }); + + // Use the BasicRowRenderer class to create a custom table row renderer + var CustomRowRenderer = TableView.BaseRowExpansionRenderer.extend({ + canRender: function(rowData) { + console.log("RowData: ", rowData); + return true; + }, + + initialize: function(args){ + this._searchManager = new SearchManager({ + id: 'details-search-manager', + preview: false + }); + this._TableView = new TableView({ + id: 'ResultsTable', + managerid: 'details-search-manager', + drilldown: "all", + drilldownRedirect: true + }); + }, + + render: function($container, rowData) { + // Print the rowData object to the console + console.log("RowData: ", rowData); + + var repoNameCell = _(rowData.cells).find(function (cell) { + return cell.field === 'Repository Name'; + }); + + + var workflowName = _(rowData.cells).find(function (cell) { + return cell.field === 'Workflow Name'; + }); + + var workflowIDCell = _(rowData.cells).find(function (cell) { + return cell.field === 'Run ID'; + }); + + this._TableView.on("click", function(e) { + e.preventDefault(); + console.log(e); + window.open("/app/github_app_for_splunk/workflow_details?form.workflow_id="+workflowIDCell.value+"&form.repoName="+repoNameCell.value+"&form.workflowName="+workflowName.value+"&form.field1.earliest=-24h%40h&form.field1.latest=now&form.timeRange.earliest=-30d%40d&form.timeRange.latest=now&form.workflowCount=25",'_self'); + }); + + this._searchManager.set({ search: '`github_webhooks` (workflow_run.id='+workflowIDCell.value+' OR workflow_job.run_id='+workflowIDCell.value+') | eval started=if(action=="requested", _time, null), completed=if(action=="completed", _time,null) | stats 
latest(workflow_run.conclusion) as Status, earliest(started) as Started, latest(completed) as Completed, latest(workflow_run.head_branch) as Branch, latest(workflow_run.event) as Trigger | eval Duration=tostring(Completed-Started, "Duration") | eval Started=strftime(Started,"%Y-%m-%dT%H:%M:%S"), Completed=strftime(Completed,"%Y-%m-%dT%H:%M:%S")| fields Status, Started, Completed, Duration, Branch, Trigger | eval Details="Click here for Workflow Details" | transpose|rename column AS Details| rename "row 1" AS values'}); + // $container is the jquery object where we can put out content. + // In this case we will render our chart and add it to the $container + $container.append(this._TableView.render().el); + } + }); + + // Create an instance of the custom cell renderer, + // add it to the table, and render the table + var myCellRenderer = new CustomCellRenderer(); + mycustomrowtable.addCellRenderer(myCellRenderer); + mycustomrowtable.render(); + + // Create an instance of the custom row renderer, + // add it to the table, and render the table + var myRowRenderer = new CustomRowRenderer(); + mycustomrowtable.addRowExpansionRenderer(myRowRenderer); + mycustomrowtable.render(); + + mycustomrowtable.on("click", function(e) { + e.preventDefault(); + console.log(e.data); + window.open("/app/github_app_for_splunk/workflow_details?form.repoName="+e.data["row.repository.full_name"]+"&form.workflowName="+e.data["row.workflow_job.name"]+"&form.field1.earliest=-24h%40h&form.field1.latest=now&form.timeRange.earliest=-30d%40d&form.timeRange.latest=now&form.workflowCount=25",'_blank'); + }); + +}); diff --git a/github_app_for_splunk/appserver/static/tabs.css b/github_app_for_splunk/appserver/static/tabs.css index 3e61043..4d7705b 100644 --- a/github_app_for_splunk/appserver/static/tabs.css +++ b/github_app_for_splunk/appserver/static/tabs.css @@ -17,3 +17,26 @@ border-top: 0px; } +/** + * This fixes the issue where the tab focus looks weird. 
+ */ + +.nav-tabs { + background: #212527; +} + +.nav-tabs > li > a { + color: #FFF; +} + +.nav-tabs > li > a:focus { + box-shadow: none !important; +} + +.nav-tabs > li:focus-within:after { + box-shadow: inset -2px 2px 3px rgba(82, 168, 236, .5); +} + +.nav-tabs > li:focus-within:before { + box-shadow: inset 3px 2px 3px rgba(82, 168, 236, .5); +} diff --git a/github_app_for_splunk/appserver/static/tabs.js b/github_app_for_splunk/appserver/static/tabs.js index 32bea79..1858339 100644 --- a/github_app_for_splunk/appserver/static/tabs.js +++ b/github_app_for_splunk/appserver/static/tabs.js @@ -1,178 +1,241 @@ require(['jquery','underscore','splunkjs/mvc', 'bootstrap.tab', 'splunkjs/mvc/simplexml/ready!'], - function($, _, mvc){ - - var tabsInitialzed = []; - - /** - * The below defines the tab handling logic. - */ - - // The normal, auto-magical Bootstrap tab processing doesn't work for us since it requires a particular - // layout of HTML that we cannot use without converting the view entirely to simpleXML. So, we are - // going to handle it ourselves. 
- var hideTabTargets = function(){ - - var tabs = $('a[data-elements]'); - - // Go through each toggle tab - for(var c = 0; c < tabs.length; c++){ - - // Hide the targets associated with the tab - var targets = $(tabs[c]).data("elements").split(","); - - for(var d = 0; d < targets.length; d++){ - $('#' + targets[d], this.$el).hide(); - } - } - }; - - var rerenderPanels = function(row_id, force){ - - // Set a default argument for dont_rerender_until_needed - if( typeof force === 'undefined'){ - force = true; - } - - // Don't do both if the panel was already rendered - if( !force && _.contains(tabsInitialzed, row_id) ){ - return; - } - - // Get the elements so that we can find the components to re-render - var elements = $('#' + row_id + ' .dashboard-element'); - - // Iterate the list and re-render the components so that they fill the screen - for(var d = 0; d < elements.length; d++){ - - // Determine if this is re-sizable - if( $('#' + row_id + ' .ui-resizable').length > 0){ - - var component = mvc.Components.get(elements[d].id); - - if(component){ - component.render(); - } - } - } - - // Remember that we initialized this tab - tabsInitialzed.push(row_id); - }; - - var selectTab = function (e) { - - // Stop if the tabs have no elements - if( $(e.target).data("elements") === undefined ){ - console.warn("Yikes, the clicked tab has no elements to hide!"); - return; - } - - // Get the IDs that we should enable for this tab - var toToggle = $(e.target).data("elements").split(","); - - // Hide the tab content by default - hideTabTargets(); - - // Now show this tabs toggle elements - for(var c = 0; c < toToggle.length; c++){ - - // Show the items - $('#' + toToggle[c], this.$el).show(); - - // Re-render the panels under the item if necessary - rerenderPanels(toToggle[c]); - } - - }; - - // Wire up the function to show the appropriate tab - $('a[data-toggle="tab"]').on('shown', selectTab); - - // Show the first tab - $('.toggle-tab').first().trigger('shown'); - - // Make 
the tabs into tabs - $('#tabs', this.$el).tab(); - + function($, _, mvc){ + + var tabsInitialzed = []; + + /** + * The below defines the tab handling logic. + */ + + /** + * This hides the content associated with the tabs. + * + * The normal, auto-magical Bootstrap tab processing doesn't work for us since it requires a particular + * layout of HTML that we cannot use without converting the view entirely to simpleXML. So, we are + * going to handle it ourselves. + * @param {string} tabSetClass the + */ + var hideTabTargets = function(tabSetClass) { + + var tabs = $('a[data-elements]'); + + // If we are only applying this to a particular set of tabs, then limit the selector accordingly + if (typeof tabSetClass !== 'undefined' && tabSetClass) { + tabs = $('a.' + tabSetClass + '[data-elements]'); + } + + // Go through each toggle tab + for (var c = 0; c < tabs.length; c++) { + + // Hide the targets associated with the tab + var targets = $(tabs[c]).data("elements").split(","); + + for (var d = 0; d < targets.length; d++) { + $('#' + targets[d], this.$el).hide(); + } + } + }; + + /** + * Force a re-render of the panels with the given row ID. 
+ * + * @param {string} row_id The ID of the row to force a rerender on + * @param {bool} force Force the tab to re-render even if it was already rendered once (defaults to true) + */ + var rerenderPanels = function(row_id, force){ + + // Set a default argument for dont_rerender_until_needed + if( typeof force === 'undefined'){ + force = true; + } + + // Don't do both if the panel was already rendered + if( !force && _.contains(tabsInitialzed, row_id) ){ + return; + } + + // Get the elements so that we can find the components to re-render + var elements = $('#' + row_id + ' .dashboard-element'); + + // Iterate the list and re-render the components so that they fill the screen + for(var d = 0; d < elements.length; d++){ + + // Determine if this is re-sizable + if( $('#' + row_id + ' .ui-resizable').length > 0){ + + var component = mvc.Components.get(elements[d].id); + + if(component){ + component.render(); + } + } + } + + // Remember that we initialized this tab + tabsInitialzed.push(row_id); + }; + + /** + * Handles the selection of a partiular tab. 
+ * + * @param {*} e + */ + var selectTab = function (e) { + console.log("selectTab"); + // Update which tab is considered active + $('#tabs > li.active').removeClass("active"); + $(e.target).closest("li").addClass("active"); + + // clearTabControlTokens(); + setActiveTabToken(); + + // Stop if the tabs have no elements + if( $(e.target).data("elements") === undefined ){ + console.warn("Yikes, the clicked tab has no elements to hide!"); + return; + } + + // Determine if the set of tabs has a restriction on the classes to manipulate + var tabSet = null; + + if ($(e.target).data("tab-set") !== undefined) { + tabSet = $(e.target).data("tab-set"); + } + + // Get the IDs that we should enable for this tab + var toToggle = $(e.target).data("elements").split(","); + + // Hide the tab content by default + hideTabTargets(tabSet); + + // Now show this tabs toggle elements + for(var c = 0; c < toToggle.length; c++){ + + // Show the items + $('#' + toToggle[c], this.$el).show(); + + // Re-render the panels under the item if necessary + rerenderPanels(toToggle[c]); + } + + }; + /** * The code below handles the tokens that trigger when searches are kicked off for a tab. 
*/ - - // Get the tab token for a given tab name + + /** + * Get the tab token for a given tab name + * @param {string} tab_name The name of the tab + */ var getTabTokenForTabName = function(tab_name){ - return tab_name; //"tab_" + - } - + return tab_name; + }; + // Get all of the possible tab control tokens var getTabTokens = function(){ - var tabTokens = []; - - var tabLinks = $('#tabs > li > a'); - - for(var c = 0; c < tabLinks.length; c++){ - tabTokens.push( getTabTokenForTabName( $(tabLinks[c]).data('token') ) ); - } - - return tabTokens; - } - - // Clear all but the active tab control tokens + var tabTokens = []; + + var tabLinks = $('#tabs > li > a'); + + for(var c = 0; c < tabLinks.length; c++){ + tabTokens.push( getTabTokenForTabName( $(tabLinks[c]).data('token') ) ); + } + + return tabTokens; + }; + + /** + * Clear all but the active tab control tokens + */ var clearTabControlTokens = function(){ - console.info("Clearing tab control tokens"); - - //tabsInitialzed = []; - var tabTokens = getTabTokens(); - var activeTabToken = getActiveTabToken(); - var tokens = mvc.Components.getInstance("submitted"); - - // Clear the tokens for all tabs except for the active one - for(var c = 0; c < tabTokens.length; c++){ - - if( activeTabToken !== tabTokens[c] ){ - tokens.set(tabTokens[c], undefined); - } - } - } - - // Get the tab control token for the active tab + console.info("Clearing tab control tokens"); + + //tabsInitialzed = []; + var tabTokens = getTabTokens(); + var activeTabToken = getActiveTabToken(); + var tokens = mvc.Components.getInstance("submitted"); + + // Clear the tokens for all tabs except for the active one + for(var c = 0; c < tabTokens.length; c++){ + + if( activeTabToken !== tabTokens[c] ){ + tokens.set(tabTokens[c], undefined); + } + } + }; + + /** + * Get the tab control token for the active tab + */ var getActiveTabToken = function(){ - return $('#tabs > li.active > a').data('token'); - } - - // Set the token for the active tab + return 
$('#tabs > li.active > a').data('token'); + }; + + /** + * Set the token for the active tab + */ var setActiveTabToken = function(){ - var activeTabToken = getActiveTabToken(); - - var tokens = mvc.Components.getInstance("submitted"); - - tokens.set(activeTabToken, ''); - } - + var activeTabToken = getActiveTabToken(); + var tokens = mvc.Components.getInstance("submitted"); + + if(activeTabToken){ + // Set each token if necessary + activeTabToken.split(",").forEach(function(token){ + + // If the token wasn't set, set it so that the searches can run + if(!tokens.toJSON()[token] || tokens.toJSON()[token] == undefined){ + tokens.set(token, ""); + } + }); + } + }; + + /** + * Handle the setting of the token for the clicked tab. + * @param {*} e + */ var setTokenForTab = function(e){ - - // Get the token for the tab - var tabToken = getTabTokenForTabName($(e.target).data('token')); - - // Set the token - var tokens = mvc.Components.getInstance("submitted"); - tokens.set(tabToken, ''); - - console.info("Set the token for the active tab (" + tabToken + ")"); - } - - $('a[data-toggle="tab"]').on('shown', setTokenForTab); - - // Wire up the tab control tokenization - var submit = mvc.Components.get("submit"); - - if( submit ){ - submit.on("submit", function() { - clearTabControlTokens(); - }); - } - - // Set the token for the selected tab - setActiveTabToken(); - -}); + // Get the token for the tab + var tabToken = getTabTokenForTabName($(e.target).data('token')); + + // Set the token + var tokens = mvc.Components.getInstance("submitted"); + tokens.set(tabToken, ''); + + console.info("Set the token for the active tab (" + tabToken + ")"); + }; + + /** + * Perform the initial setup for making the tabs work. 
+ */ + var firstTimeTabSetup = function() { + $('a.toggle-tab').on('shown', setTokenForTab); + + // Wire up the function to show the appropriate tab + $('a.toggle-tab').on('click shown', selectTab); + + // Show the first tab in each tab set + $.each($('.nav-tabs'), function(index, value) { + $('.toggle-tab', value).first().trigger('shown'); + }); + + // Make the tabs into tabs + $('#tabs', this.$el).tab(); + + // Wire up the tab control tokenization + var submit = mvc.Components.get("submit"); + + if(submit){ + submit.on("submit", function() { + clearTabControlTokens(); + }); + } + + // Set the token for the selected tab + setActiveTabToken(); + }; + + firstTimeTabSetup(); +}); diff --git a/github_app_for_splunk/appserver/static/workflowdetails.js b/github_app_for_splunk/appserver/static/workflowdetails.js new file mode 100644 index 0000000..d9498f4 --- /dev/null +++ b/github_app_for_splunk/appserver/static/workflowdetails.js @@ -0,0 +1,118 @@ +require([ + "underscore", + "splunkjs/mvc", + "splunkjs/mvc/searchmanager", + "splunkjs/mvc/tableview", + "splunkjs/mvc/simplexml/ready!" 
+], function( + _, + mvc, + SearchManager, + TableView +) { + + // Set up search managers + var search2 = new SearchManager({ + id: "workflow_details", + preview: true, + cache: true, + search: mvc.tokenSafe("`github_webhooks` eventtype=\"GitHub::Workflow\" \"workflow_job.run_id\"=$workflow_id$| fields * | eval queued=if(action==\"queued\",_time,null), started=if(action==\"in_progress\",_time,null), completed=if(action==\"completed\",_time,null) | stats latest(workflow_job.conclusion) as status, latest(workflow_job.name) as Name, latest(queued) as queued, latest(started) as started, latest(completed) as completed by workflow_job.id | eval queueTime=toString(round(started-queued),\"Duration\"), runTime=toString(round(completed-started),\"Duration\"), totalTime=toString(round(completed-queued),\"Duration\"), status=if(status==\"null\",\"in_progress\",status) | rename workflow_job.id AS JobID | fields status, Name, JobID, queueTime, runTime, totalTime"), + earliest_time: mvc.tokenSafe("timeTkn.earliest$"), + latest_time: mvc.tokenSafe("timeTkn.latest$") + }); + + // Create a table for a custom row expander + var mycustomrowtable = new TableView({ + id: "table-customrow", + managerid: "workflow_details", + drilldown: "none", + drilldownRedirect: false, + el: $("#table-customrow") + }); + + // Define icons for the custom table cell + var ICONS = { + failure: "error", + in_progress: "question-circle", + success: "check-circle" + }; + + // Use the BaseCellRenderer class to create a custom table cell renderer + var CustomCellRenderer = TableView.BaseCellRenderer.extend({ + canRender: function(cellData) { + // This method returns "true" for the "range" field + return cellData.field === "status"; + }, + + // This render function only works when canRender returns "true" + render: function($td, cellData) { + console.log("cellData: ", cellData); + + var icon = "question"; + if(ICONS.hasOwnProperty(cellData.value)) { + icon = ICONS[cellData.value]; + } + 
$td.addClass("icon").html(_.template('', { + icon: icon, + status: cellData.value + })); + } + }); + + // Use the BasicRowRenderer class to create a custom table row renderer + var CustomRowRenderer = TableView.BaseRowExpansionRenderer.extend({ + canRender: function(rowData) { + console.log("RowData: ", rowData); + return true; + }, + + initialize: function(args){ + this._searchManager = new SearchManager({ + id: 'details-search-manager', + preview: false + }); + this._TableView = new TableView({ + id: 'ResultsTable', + managerid: 'details-search-manager', + drilldown: "all", + drilldownRedirect: true + }); + }, + + render: function($container, rowData) { + // Print the rowData object to the console + // console.log("RowData: ", rowData); + + var repoNameCell = _(rowData.cells).find(function (cell) { + return cell.field === 'Repository Name'; + }); + + + var workflowName = _(rowData.cells).find(function (cell) { + return cell.field === 'Workflow Name'; + }); + + var workflowIDCell = _(rowData.cells).find(function (cell) { + return cell.field === 'Run ID'; + }); + + this._searchManager.set({ search: '`github_webhooks` (workflow_run.id='+workflowIDCell.value+' OR workflow_job.run_id='+workflowIDCell.value+') | eval started=if(action=="requested", _time, null), completed=if(action=="completed", _time,null) | stats latest(workflow_run.conclusion) as Status, earliest(started) as Started, latest(completed) as Completed, latest(workflow_run.head_branch) as Branch, latest(workflow_run.event) as Trigger | eval Duration=tostring(Completed-Started, "Duration") | fields Status, Duration, Branch, Trigger | eval Details="Click here for Workflow Details" | transpose|rename column AS Details| rename "row 1" AS values'}); + // $container is the jquery object where we can put out content. 
+ // In this case we will render our chart and add it to the $container + $container.append(this._TableView.render().el); + } + }); + + // Create an instance of the custom cell renderer, + // add it to the table, and render the table + var myCellRenderer = new CustomCellRenderer(); + mycustomrowtable.addCellRenderer(myCellRenderer); + mycustomrowtable.render(); + + // Create an instance of the custom row renderer, + // add it to the table, and render the table + var myRowRenderer = new CustomRowRenderer(); + mycustomrowtable.render(); + + +}); diff --git a/github_app_for_splunk/default/app.conf b/github_app_for_splunk/default/app.conf index 022cf47..57e3596 100644 --- a/github_app_for_splunk/default/app.conf +++ b/github_app_for_splunk/default/app.conf @@ -7,11 +7,11 @@ version = X.Y.Z [ui] is_visible = 1 -label = Github App for Splunk +label = GitHub App for Splunk [launcher] author = Doug Erkkila -description = Report on Activity and Audit Data from Github +description = Report on Activity and Audit Data from GitHub version = X.Y.Z [package] diff --git a/github_app_for_splunk/default/data/ui/nav/default.xml b/github_app_for_splunk/default/data/ui/nav/default.xml index a493681..e952452 100644 --- a/github_app_for_splunk/default/data/ui/nav/default.xml +++ b/github_app_for_splunk/default/data/ui/nav/default.xml @@ -1,14 +1,30 @@ \ No newline at end of file + + + + + + diff --git a/github_app_for_splunk/default/data/ui/views/1_system_health_monitor.xml b/github_app_for_splunk/default/data/ui/views/1_system_health_monitor.xml index 701d8c3..b8d12c4 100644 --- a/github_app_for_splunk/default/data/ui/views/1_system_health_monitor.xml +++ b/github_app_for_splunk/default/data/ui/views/1_system_health_monitor.xml @@ -137,9 +137,8 @@ Forks - | mstats avg(_value) prestats=true WHERE `github_collectd` AND metric_name="processes.fork_rate.value" AND host="$envTkn$" span=10s BY metric_name -| eval metric_name=mvindex(split(metric_name,"."),1) -| timechart avg(_value) as "Avg" 
span=10sec by metric_name + | mstats rate_avg("processes.fork_rate.value") as "Rate (Avg) /s" chart=true WHERE `github_collectd` host="$envTkn$" span=10s +| fields - _span* $timeTkn.earliest$ $timeTkn.latest$ diff --git a/github_app_for_splunk/default/data/ui/views/2_process_monitor.xml b/github_app_for_splunk/default/data/ui/views/2_process_monitor.xml index 802adb8..a9e8fbe 100644 --- a/github_app_for_splunk/default/data/ui/views/2_process_monitor.xml +++ b/github_app_for_splunk/default/data/ui/views/2_process_monitor.xml @@ -55,8 +55,135 @@ + + + + + + + + + + + + CPU (Kernel) + + | mstats rate_avg("processes.ps_cputime.syst") chart=true WHERE `github_collectd` host="$envTkn$" span=10s chart.useother=false chart.agg=max chart.limit=top100 BY plugin_instance +| fields - _span* +| rename "_time /s" AS _time + -1h + + + + + + + + + + + + + + + CPU (Application) + + | mstats rate_avg("processes.ps_cputime.user") chart=true WHERE `github_collectd` host="$envTkn$" span=10s chart.useother=false chart.agg=max chart.limit=top100 BY plugin_instance +| fields - _span* +| rename "_time /s" AS _time + -1h + + + + + + + + + + + + + + + + + I/O Operations (Read IOPS) + + | mstats rate_avg("processes.ps_disk_ops.read") chart=true WHERE `github_collectd` host="$envTkn$" span=10s chart.useother=false chart.agg=max chart.limit=top100 BY plugin_instance +| fields - _span* +| rename "_time /s" AS _time + -1h + + + + + + + + + + + + + + I/O Operations (Write IOPS) + + | mstats rate_avg("processes.ps_disk_ops.write") chart=true WHERE `github_collectd` host="$envTkn$" span=10s chart.useother=false chart.agg=max chart.limit=top100 BY plugin_instance +| fields - _span* +| rename "_time /s" AS _time + -1h + + + + + + + + + + + + + + + + Storage Traffic (Read) + + | mstats rate_avg("processes.ps_storage_octets.read") chart=true WHERE `github_collectd` host="$envTkn$" span=10s chart.useother=false chart.agg=avg chart.limit=top100 BY plugin_instance +| fields - _span* +| rename "_time /s" AS 
_time + -1h + + + + + + + + + + + + + + + + Storage Traffic (Write) + + | mstats rate_avg("processes.ps_storage_octets.write") chart=true WHERE `github_collectd` host="$envTkn$" span=10s chart.useother=false chart.agg=avg chart.limit=top100 BY plugin_instance +| fields - _span* +| rename "_time /s" AS _time + -1h + + + + + + + diff --git a/github_app_for_splunk/default/data/ui/views/3_authentication_monitor.xml b/github_app_for_splunk/default/data/ui/views/3_authentication_monitor.xml new file mode 100644 index 0000000..f35698d --- /dev/null +++ b/github_app_for_splunk/default/data/ui/views/3_authentication_monitor.xml @@ -0,0 +1,314 @@ +
+ +
+ + + + -60m@m + now + + + + + All + * + * + host + host + + | mstats count prestats=true WHERE `github_collectd` AND metric_name="cpu.*" span=10s BY host +| dedup host | table host + -24h@h + now + + +
+ + + Authentication Totals + + + | mstats avg(_value) prestats=true WHERE `github_collectd` AND host="$envTkn$" AND metric_name="statsd.gauge.github/auth/result/*/*.value" span=10s BY metric_name +| eval metric_name=mvjoin(mvindex(split(mvindex(split(metric_name,"."),2),"/"),3,-1),".") +| timechart avg(_value) as "Avg" span=10sec by metric_name + $timeTkn.earliest$ + $timeTkn.latest$ + 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Authentication Rate + + + | mstats sum(_value) prestats=true WHERE `github_collectd` AND host="$envTkn$" AND metric_name="statsd.gauge.github/auth/result/*/*.value" span=1m BY metric_name +| eval metric_name=mvjoin(mvindex(split(mvindex(split(metric_name,"."),2),"/"),3,-1),".") +| timechart sum(_value) as "sum" span=1m by metric_name + $timeTkn.earliest$ + $timeTkn.latest$ + 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + LDAP Authentications + + + | mstats avg(_value) prestats=true WHERE `github_collectd` AND host="$envTkn$" AND metric_name="statsd.gauge.github/ldap/authenticate/*/*.value" span=10s BY metric_name +| eval metric_name=mvjoin(mvindex(split(mvindex(split(metric_name,"."),2),"/"),3,-1),".") +| timechart avg(_value) as "Avg" span=10sec by metric_name + $timeTkn.earliest$ + $timeTkn.latest$ + 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + LDAP Authentication Response Time + + + | mstats max(_value) prestats=true WHERE `github_collectd` AND host="$envTkn$" AND metric_name="statsd.latency.github/ldap/authenticate/*/runtime-percentile-90.value" span=1m BY metric_name +| eval metric_name=mvjoin(mvindex(split(mvindex(split(metric_name,"."),2),"/"),3),".") +| timechart max(_value) as "max" span=1m by metric_name + $timeTkn.earliest$ + $timeTkn.latest$ + 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + LDAP Sync Totals + + + | mstats avg(_value) prestats=true WHERE `github_collectd` AND 
host="$envTkn$" AND metric_name="statsd.gauge.github/ldap/sync/*/total.value" span=10s BY metric_name +| eval metric_name=mvindex(split(mvindex(split(metric_name,"."),2),"/"),3) +| timechart avg(_value) as "Avg" span=10sec by metric_name + $timeTkn.earliest$ + $timeTkn.latest$ + 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + LDAP Sync Runtime + + + | mstats max(_value) prestats=true WHERE `github_collectd` AND host="$envTkn$" AND metric_name="statsd.latency.github/ldap/sync/*/runtime-percentile-90.value" span=1m BY metric_name +| eval metric_name=mvjoin(mvindex(split(mvindex(split(metric_name,"."),2),"/"),3),".") +| timechart max(_value) as "max" span=1m by metric_name + $timeTkn.earliest$ + $timeTkn.latest$ + 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
diff --git a/github_app_for_splunk/default/data/ui/views/8_storage_monitor.xml b/github_app_for_splunk/default/data/ui/views/8_storage_monitor.xml new file mode 100644 index 0000000..96ecbbb --- /dev/null +++ b/github_app_for_splunk/default/data/ui/views/8_storage_monitor.xml @@ -0,0 +1,128 @@ +
+ +
+ + + + -60m@m + now + + + + + All + * + * + host + host + + | mstats count prestats=true WHERE `github_collectd` AND metric_name="cpu.*" span=10s BY host +| dedup host | table host + -24h@h + now + + +
+ + + Disk Usage (Root device) + + + | mstats avg(_value) as "Avg" WHERE `github_collectd` AND host="$envTkn$" AND metric_name="df.df_complex.*.value" AND plugin_instance=root span=10s BY metric_name +| eval disk_gb = Avg / 1024 / 1024 / 1024 +| search metric_name !="*.reserved*" +| eval metric_name=mvindex(split(metric_name,"."),2) +| timechart avg("disk_gb") as "Avg" span=10sec by metric_name + $timeTkn.earliest$ + $timeTkn.latest$ + 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Disk Usage (Data Device) + + + | mstats avg(_value) as "Avg" WHERE `github_collectd` AND host="$envTkn$" AND metric_name="df.df_complex.*.value" AND plugin_instance=data* span=10s BY metric_name +| eval disk_gb = Avg / 1024 / 1024 / 1024 +| search metric_name !="*.reserved*" +| eval metric_name=mvindex(split(metric_name,"."),2) +| timechart avg("disk_gb") as "Avg" span=10sec by metric_name + $timeTkn.earliest$ + $timeTkn.latest$ + 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
diff --git a/github_app_for_splunk/default/data/ui/views/api_config.xml b/github_app_for_splunk/default/data/ui/views/api_config.xml deleted file mode 100644 index c101422..0000000 --- a/github_app_for_splunk/default/data/ui/views/api_config.xml +++ /dev/null @@ -1,232 +0,0 @@ - - - - - -

GitHub Enterprise Audit Log Monitoring

-

This modular input makes an HTTPS request to the GitHub Enterprise's Audit Log REST API endpoint at a definable interval to fetch audit log data.

-

Prerequisites

-
    -
  • Splunk v8+
  • -
-

Installation

-
    -
  1. -

    Download the latest release of the Splunk Add-On for GitHub Enterprise Audit Logs from SplunkBase

    -
  2. -
  3. -

    Go to Apps > Manage Apps in the toolbar menu.

    -
  4. -
  5. -

    Use the "Install app from file" button to upload the spl file you downloaded from Splunkbase

    -
  6. -
  7. -

    Generate a Personal Access Token in GitHub Enterprise with the site_admin scope.

    -
  8. -
  9. -

    Under Settings > Data inputs, there should be a new option called Github Audit Log Monitoring, click "+ Add new"

    -
  10. -
  11. -

    Configure the Input by entering the necessary information in the input fields. Don't forget to define the Index for the data to be stored in. This option is under the "More settings" option.

    -
  12. -
  13. -

    Under Settings > Advanced Search, select Search Macros. You'll need to update the github_source macro to use the Index you assigned above.

    -
  14. -
-

Configuration

-

Personal Access Token Scope

-

The following are the required scopes for the personal access token allowing the module to fetch the audit log entries successfully:

-
    -
  • [x] admin:enterprise Full control of enterprises -
      -
    • [x] manage_billing:enterprise Read and write enterprise billing data -
    • -
    • [x] read:enterprise Read enterprise profile data -
    • -
    -
  • -
-

Input Fields

-
    -
  • -

    - name -

    -
      -
    • This is the name of your instance. You can have multiple modular inputs running simultaneously. However, this is not a recommended behavior for this module.
    • -
    • Takes: alpha-numeric, white spaces and symbol characters
    • -
    • Example: GHE-enterprise-name -
    • -
    -
  • -
  • -

    - Hostname -

    -
      -
    • This is the hostname of your GitHub Enterprise instance. Make sure there are no trailing / in the URL provided. This could either be a FQDN or an IP address. Do not append any paths beyond the tld.
    • -
    • Example: https://api.github.com -
    • -
    -
  • -
  • -

    - Enterprise -

    -
      -
    • The enterprise name for which to fetch audit log events
    • -
    -
  • -
  • -

    - Personal Access Token -

    -
      -
    • This is your personal access token that you generate for your or a service account in GitHub Enterprise. This module requires that the personal access token be created with the site_admin scope. This is a very sensitive token so make sure to keep it secure at all times!
    • -
    • Security: The personal access token is encrypted and stored in Splunk's password storage. After you configure it the first time it will be replaced in Splunk's UI with a unique identifier. This identifier will be used by the module to fetch the personal access token before making the API request to GitHub Enterprise.
    • -
    • Takes: a 40 character token
    • -
    • Example: d0e117b6ad471der3rjdowcc401a95d09202119f -
    • -
    -
  • -
  • -

    - Event Types -

    -
      -
    • The audit log contains multiple event types. This field allows you to specify which events to include:
        -
      • web - returns web (non-Git) events
      • -
      • git - returns Git events
      • -
      • all - returns both web and Git events
      • -
      -
    • -
    • - More details -
    • -
    -
  • -
  • -

    - Maximum Entries Per Run -

    -
      -
    • The maximum number of events / entries to fetch each time the script runs. To understand how to calculate the maximum number of entries and interval to best fit your organization go to the Tweaking throughput section below.
    • -
    -
  • -
  • -

    - Verify Self-Signed Certificates -

    -
      -
    • This is a parameter passed to the get() method in the Requests library. If the checkbox is checked then the SSL certificate will be verified like a browser does and Requests will throw an SSLError if it’s unable to verify the certificate. Uncheck this box if you are using self-signed certificates.
    • -
    -
  • -
  • -

    - Debug Mode -

    -
      -
    • The personal access token will be leaked in the splunkd logs. DO NOT ENABLE unless you are ready to update your personal access token.
    • -
    • If you are experiencing issues and the module is not operating as intended, you can enable this mode to see the module's debugging information in the splunkd logs.
    • -
    -
  • -
  • -

    - Interval -

    -
      -
    • Takes a cron expression as defined in the Splunk docs.
    • -
    • Example: 30 * * * * -
        -
      • At minute 30 of every hour. For example, if you set this CRON job at 11:02, your job will begin running at 11:30, 12:30, 1:30, etc...
      • -
      -
    • -
    • Example: */5 * * * * -
        -
      • Every 5 minutes
      • -
      -
    • -
    • Example: 300 -
        -
      • Every 300 seconds or 5 minutes
      • -
      -
    • -
    -
  • -
-

Tweaking throughput

-

This modular input fetches events by calling the Enterprise Audit Log API. This API returns a maximum of 100 events / entries per page. The pagination algorithm can fetch events up to the maximum entries per run defined. It's important to tweak the maximum entries per run and interval parameters to have the ability to fetch your data in a timely manner and stay as close to real-time as possible.

-

- Example: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
EnterpriseEvents per minuteMaximum entries per runIntervalAPI calls usedGuidance
Evil-Corp10001000*/1 * * * *3000 per hourThe modular input should be able to handle this with ease.
Poizen-Inc50005000*/1 * * * *600 per hourWe are approaching API rate limit per hour. Depending on latency, 5000 entries = 50 API calls per minute. One minute might not be sufficient to fetch all this data.
Monsters-Inc100002000*/1 * * * *1200 per hourWe are approaching API rate limit per hour. Depending on latency, 5000 entries = 50 API calls per minute. One minute might not be sufficient to fetch all this data.
-

-

FAQs

-

How is my Personal Access Token secured?

-

On the first run the modular input will identify that your personal access token (PAT) is not encrypted. It will encrypt your PAT and store it in Splunk's credentials manager. It will replace the plaintext PAT with an md5 hash of an identifying key.

-

Your personal access token is only visible in plaintext from the time you configure the modular input instance until the first run.

-

Does the interval field accept only cron syntax?

-

No, you can enter the number of seconds instead.

-

I enabled debug mode, what now?

-

If you've enabled debug mode be ready to change your personal access token because it will most likely be leaked into the Splunk logs in plain text.

-

Why can't I use a GitHub app instead of a personal access token?

-

GitHub apps cannot be installed on the enterprise level. The REST API requires enterprise admin privileges which are out of scope for GitHub apps.

-

Troubleshooting

-

Read logs in Splunk

-

You can use this search query to fetch all the logs belonging to this module when Debug Mode is enabled.

-
-          
-            index="_internal" source="/opt/splunk/var/log/splunk/splunkd.log" ghe_audit_log_monitoring
-
-        
-

Test the modular input for syntax problems

-

Run this test if you don't see anything in the logs (which is a highly unlikely scenario). It will display any syntax errors present in the script.

-
-          sudo $SPLUNK_HOME
-            /bin/splunk cmd python $SPLUNK_HOME
-            /etc/apps/ghe_audit_log_monitoring/bin/ghe_audit_log_monitoring.py
-
-        
-

Where are state files stored?

-

State files for enterprises are stored in this directory:

-
-          
-            $SPLUNK_HOME/etc/apps/ghe_audit_log_monitoring/state/
-
-        
- -
-
-
diff --git a/github_app_for_splunk/default/data/ui/views/audit_log_activity.xml b/github_app_for_splunk/default/data/ui/views/audit_log_activity.xml index 3a1dc99..b3d06d2 100644 --- a/github_app_for_splunk/default/data/ui/views/audit_log_activity.xml +++ b/github_app_for_splunk/default/data/ui/views/audit_log_activity.xml @@ -14,7 +14,7 @@ Events over time - `github_source` action | dedup document_id | timechart count by action + `github_source` action=* | timechart count by action $timeRng.earliest$ $timeRng.latest$ @@ -29,7 +29,7 @@ Total events - `github_source` action | dedup document_id | stats count + `github_source` action=* | stats count $timeRng.earliest$ $timeRng.latest$ @@ -48,10 +48,10 @@ @@ -61,7 +61,7 @@ - `github_source` | rex field=actor_location "\{\'country_code\'\: \'(?<iso2>[A-Z]{2})\'" | stats count by iso2 | lookup geo_attr_countries iso2 OUTPUT country | append [ | inputlookup geo_attr_countries] | dedup country | fillnull value=0 | fields+ count, country, geom | geom geo_countries featureIdField="country" + `github_source` | rename actor_location.country_code AS iso2 | stats count by iso2 | lookup geo_attr_countries iso2 OUTPUT country | append [ | inputlookup geo_attr_countries] | dedup country | fillnull value=0 | fields+ count, country, geom | geom geo_countries featureIdField="country" $timeRng.earliest$ $timeRng.latest$ 1 @@ -99,7 +99,7 @@ - `github_source` action | rex field=actor_location "\{\'country_code\'\: \'(?<iso2>[A-Z]{2})\'" | stats count by iso2 | lookup geo_attr_countries iso2 OUTPUT country | fields country, count + `github_source` action=* | rename actor_location.country_code AS iso2 | stats count by iso2 | lookup geo_attr_countries iso2 OUTPUT country | fields country, count $timeRng.earliest$ $timeRng.latest$ 1 @@ -120,7 +120,7 @@ Top 5 event types - `github_source` action | dedup document_id | stats count by action | sort 5 - count + `github_source` action=* | stats count by action | sort 5 - count $timeRng.earliest$ 
$timeRng.latest$ @@ -138,7 +138,7 @@ Top 5 active users - `github_source` action | dedup document_id | stats count by actor | sort 5 - count + `github_source` action=* | stats count by actor | sort 5 - count $timeRng.earliest$ $timeRng.latest$ @@ -154,7 +154,7 @@ Events per org - `github_source` action | dedup document_id | stats count by org + `github_source` action=* | stats count by org $timeRng.earliest$ $timeRng.latest$ @@ -168,7 +168,7 @@ Workflow runs - `github_source` | dedup document_id | stats count by conclusion + `github_source` | stats count by conclusion $timeRng.earliest$ $timeRng.latest$ @@ -183,7 +183,7 @@ Top 10 active repositories - `github_source` | dedup document_id | rename repo as repository | stats count by repository | sort 10 - count + `github_source` | rename repo as repository | stats count by repository | sort 10 - count $timeRng.earliest$ $timeRng.latest$ diff --git a/github_app_for_splunk/default/data/ui/views/code_scanning_overview.xml b/github_app_for_splunk/default/data/ui/views/code_scanning_overview.xml new file mode 100644 index 0000000..eeaab84 --- /dev/null +++ b/github_app_for_splunk/default/data/ui/views/code_scanning_overview.xml @@ -0,0 +1,206 @@ +
+ + + + `github_webhooks` (eventtype="GitHub::CodeScanning" OR eventtype="GitHub::Push") | eval action='action', tool=if(isnotnull('alert.tool.name'),'alert.tool.name','unknown'), repository=if(isnotnull('repository.name'),'repository.name','unknown'), severity=if(isnotnull('alert.rule.security_severity_level'),'alert.rule.security_severity_level','none'), create_time=if(isnotnull('alert.created_at'),'alert.created_at','unknown'), received_time='_time', alert_url=if(isnotnull('alert.html_url'),'alert.html_url','unknown'), eventtype='eventtype', created=strptime(create_time, "%Y-%m-%dT%H:%M:%S%Z"), resolved_at=case('alert.dismissed_at' != "null", 'alert.dismissed_at', isnotnull('alert.fixed_at'), 'alert.fixed_at', isnotnull('alert.resolved_at'),'alert.resolved_at', 1=1, _time), duration = toString(round(strptime(resolved_at, "%Y-%m-%dT%H:%M:%S") - strptime(create_time, "%Y-%m-%dT%H:%M:%S"))), duration_str=tostring(avg(duration), "duration") + + $timeTkn.earliest$ + $timeTkn.latest$ + 1 + +
+ + + + -24h@h + now + + + + + tool + tool + " + " + + | table tool | dedup tool + + All + * + * + + + + All + * + * + " + " + , + repository + repository + + | dedup repository | table repository + + +
+ + + Mean Time to Resolution (MTTR) + + + | search eventtype="GitHub::CodeScanning" (action=fixed OR action=closed_by_user) tool=$tool_name$ repository=$repoTkn$ +| eval action=action, , repository=if(isnotnull('repository.name'),'repository.name','unknown') +| eval age = avg(duration) +| appendpipe [ stats avg(age) as totalTime ] +| eval mttr = toString(round(totalTime), "duration"), clean_mttr = replace (mttr , "\+" , " days, ") +| stats max(clean_mttr) + + + + + + + + Created + + + | search tool=$tool_name$ repository=$repoTkn$ action="created" | stats count + + + + + + + + + Fixed + + + | search tool=$tool_name$ repository=$repoTkn$ action="fixed" | stats count + + + + + + + + Reopened + + + | search tool=$tool_name$ repository=$repoTkn$ action="reopened" | stats count + + + + + + + + + + Alert Found/Fixed Ratio + + + | search tool=$tool_name$ repository=$repoTkn$ (action=created OR action=fixed) +| timechart count(_raw) by action +| accum created +| accum fixed +| rename created as "Found" +| rename fixed as "Fixed" + + + + + + + + + + Commit/Alert Ratio + + + | search (eventtype="GitHub::Push" repository=$repoTkn$) OR ((action=created OR action=reopened) tool=$tool_name$ repository=$repoTkn$ ) +| timechart count(_raw) by eventtype +| accum "GitHub::Push" +| accum "GitHub::CodeScanning" +| rename GitHub::Push as "Pushes" +| rename GitHub::CodeScanning as "Code Scanning Alerts" + + + + + + + + + + + + + + New Alerts by Tool + + + | search tool=$tool_name$ repository=$repoTkn$ (action=created OR action=appeared_in_branch) | timechart count(_raw) by tool + + + + + + + + + + + +
+ Fixed Alerts + + | search (action=fixed OR action=closed_by_user) repository=$repoTkn$ tool=$tool_name$ +|eval clean_duration = replace (duration_str , "\+" , " days, ") +| table repository, tool, alert_url,clean_duration +| rename repository AS "Repository" clean_duration AS "Time to Resolution",tool AS "Tool", alert_url AS "Alert URL" +| sort -"Time to Resolution" + + + +
+
+
+ + + + Alerts by Severity + + | search (action=created OR action=reopened) repository=$repoTkn$ tool=$tool_name$ | chart usenull=f count over repository by severity + + + + + + + + + + + +
+
+ + + Alerts by Repo + + | search (action=created OR action=reopened) repository=$repoTkn$ tool=$tool_name$| chart usenull=f count over repository by tool + + + + + + + + + +
+
+
+ diff --git a/github_app_for_splunk/default/data/ui/views/dependabot_alerts.xml b/github_app_for_splunk/default/data/ui/views/dependabot_alerts.xml new file mode 100644 index 0000000..3496568 --- /dev/null +++ b/github_app_for_splunk/default/data/ui/views/dependabot_alerts.xml @@ -0,0 +1,181 @@ +
+ + + + `github_webhooks` eventtype="GitHub::VulnerabilityAlert" | eval action='action', repository=if(isnotnull('repository.name'),'repository.name','unknown'), severity=if(isnotnull('alert.severity'),'alert.severity','none'), create_time=if(isnotnull('alert.created_at'),'alert.created_at','unknown'), received_time='_time', alert_url=if(isnotnull('alert.external_reference'),'alert.external_reference','unknown'), eventtype='eventtype', created=strptime(create_time, "%Y-%m-%dT%H:%M:%S%Z"), resolved_at=case('alert.dismissed_at' != "null", 'alert.dismissed_at', isnotnull('alert.fixed_at'), 'alert.fixed_at', isnotnull('alert.resolved_at'),'alert.resolved_at', 1=1, _time), duration = toString(round(strptime(resolved_at, "%Y-%m-%dT%H:%M:%S") - strptime(create_time, "%Y-%m-%dT%H:%M:%S"))), duration_str=tostring(avg(duration), "duration") + + $timeTkn.earliest$ + $timeTkn.latest$ + 1 + +
+ + + + -24h@h + now + + + + + All + * + * + " + " + , + repository + repository + + | dedup repository | table repository + + + + + severity + severity + " + " + + | table severity | dedup severity + + All + * + * + +
+ + + + Mean Time to Resolution (MTTR) + + | search severity=$severity_label$ repository=$repoTkn$ action="resolve" + | eval age = avg(duration) + | appendpipe [ stats avg(age) as totalTime ] + | eval mttr = toString(round(totalTime), "duration"), clean_mttr = replace (mttr , "\+" , " days, ") + | stats max(clean_mttr) + + + + + + + + + + + Created + + | search severity=$severity_label$ repository=$repoTkn$ action="create" | stats count + + + + + + + + + + Fixed + + | search severity=$severity_label$ repository=$repoTkn$ (action="resolve") | stats count + + + + + + + + + Dismissed + + | search severity=$severity_label$ repository=$repoTkn$ (action="dismiss") | stats count + + + + + + + + + + + Alert Found/Fixed Ratio + + | search severity=$severity_label$ repository=$repoTkn$ (action=create OR action=resolve OR action=dismiss) +| timechart count(_raw) by action +| accum create +| accum resolve +| rename create as "Found" +| rename resolve as "Fixed" +| rename dismiss as "Dismissed" + + + + + + + + + + + Vulnerabilities by Repo + + | search severity=$severity_label$ repository=$repoTkn$ action=create | chart count by repository + + + + + + + + + + + + New Alerts by Severity + + | search severity=$severity_label$ repository=$repoTkn$ (action=create) | timechart count(_raw) by severity + + + + + + + + + + + + + Fixed Alerts + + | search (action=resolve OR action=dismiss) repository=$repoTkn$ severity=$severity_label$ +| table action, repository, severity, alert_url,duration_str +| rename action AS "Action", repository AS "Repository" duration_str AS "Time to Resolution",severity AS "Severity", alert_url AS "Alert URL" +| sort -"Time to Resolution" + + + +
+
+
+ + + + Alerts by Repo + + | search (action=create) repository=$repoTkn$ severity=$severity_label$| chart usenull=f count over repository by severity + + + + + + + + + +
+
+
+
\ No newline at end of file diff --git a/github_app_for_splunk/default/data/ui/views/integration_overview.xml b/github_app_for_splunk/default/data/ui/views/integration_overview.xml index 04fad00..f3d6fdb 100644 --- a/github_app_for_splunk/default/data/ui/views/integration_overview.xml +++ b/github_app_for_splunk/default/data/ui/views/integration_overview.xml @@ -27,7 +27,7 @@ - index=_internal component=ExecProcessor "TA_splunk_ghe_audit_log_monitoring" "stream_events(): Fetched:" OR "API Rate limits"| rex "\'x_rl_limit\'\: \'(?<x_rl_limit>\d+?)\', \'x_rl_remainig\'\: \'(?<x_rl_remaining>\d+?)\', 'x_rl_reset_timestamp\'\: \'(?<x_rl_reset_timestamp>\d+?)\', \'x_rl_used\'\: \'(?<x_rl_used>\d+?)\'" | rex "stream_events\(\)\: Fetched: (?<event_count>\d+?) events" | timechart sum(event_count) as fetched_event max(x_rl_limit) as x_rl_limit, min(x_rl_remaining) as x_rl_remaining, max(x_rl_used) as x_rl_used | stats max(x_rl_limit) as "Rate Limit", avg(x_rl_used) as "Average Rate Limit Used", min(fetched_event) as "Minimum Fetched Events", avg(fetched_event) as "Average Fetched Events", max(fetched_event) as "Maximum Fetched Events" + index=_internal component=ExecProcessor "github-audit-log-monitoring-add-on-for-splunk" "stream_events(): Fetched:" OR "API Rate limits"| rex "\'x_rl_limit\'\: \'(?<x_rl_limit>\d+?)\', \'x_rl_remainig\'\: \'(?<x_rl_remaining>\d+?)\', 'x_rl_reset_timestamp\'\: \'(?<x_rl_reset_timestamp>\d+?)\', \'x_rl_used\'\: \'(?<x_rl_used>\d+?)\'" | rex "stream_events\(\)\: Fetched: (?<event_count>\d+?) 
events" | timechart sum(event_count) as fetched_event max(x_rl_limit) as x_rl_limit, min(x_rl_remaining) as x_rl_remaining, max(x_rl_used) as x_rl_used | stats max(x_rl_limit) as "Rate Limit", avg(x_rl_used) as "Average Rate Limit Used", min(fetched_event) as "Minimum Fetched Events", avg(fetched_event) as "Average Fetched Events", max(fetched_event) as "Maximum Fetched Events" -24h@h now 1 @@ -57,7 +57,7 @@ Rate Limit Usage - index=_internal component=ExecProcessor "TA_splunk_ghe_audit_log_monitoring" "API Rate limits"| rex "\'x_rl_limit\'\: \'(?<x_rl_limit>\d+?)\', \'x_rl_remainig\'\: \'(?<x_rl_remaining>\d+?)\', 'x_rl_reset_timestamp\'\: \'(?<x_rl_reset_timestamp>\d+?)\', \'x_rl_used\'\: \'(?<x_rl_used>\d+?)\'" | timechart max(x_rl_limit) as "Rate Limit", min(x_rl_remaining) as "Rate Limit Remaining", max(x_rl_used) as "Rate Limit Used" + index=_internal component=ExecProcessor "github-audit-log-monitoring-add-on-for-splunk" "API Rate limits"| rex "\'x_rl_limit\'\: \'(?<x_rl_limit>\d+?)\', \'x_rl_remainig\'\: \'(?<x_rl_remaining>\d+?)\', 'x_rl_reset_timestamp\'\: \'(?<x_rl_reset_timestamp>\d+?)\', \'x_rl_used\'\: \'(?<x_rl_used>\d+?)\'" | timechart max(x_rl_limit) as "Rate Limit", min(x_rl_remaining) as "Rate Limit Remaining", max(x_rl_used) as "Rate Limit Used" $timeRng.earliest$ $timeRng.latest$ 1 @@ -101,7 +101,7 @@ Fetched Events - index=_internal component=ExecProcessor "TA_splunk_ghe_audit_log_monitoring" "stream_events(): Fetched:" | rex "stream_events\(\)\: Fetched: (?<event_count>\d+?) events" | timechart sum(event_count) as fetched_event + index=_internal component=ExecProcessor "github-audit-log-monitoring-add-on-for-splunk" "stream_events(): Fetched:" | rex "stream_events\(\)\: Fetched: (?<event_count>\d+?) 
events" | timechart sum(event_count) as fetched_event $timeRng.earliest$ $timeRng.latest$ 1 diff --git a/github_app_for_splunk/default/data/ui/views/repository_audit.xml b/github_app_for_splunk/default/data/ui/views/repository_audit.xml index d2a7905..ca6ff2e 100644 --- a/github_app_for_splunk/default/data/ui/views/repository_audit.xml +++ b/github_app_for_splunk/default/data/ui/views/repository_audit.xml @@ -53,13 +53,13 @@ @@ -95,9 +95,9 @@ - Repository Action Details + Repository Workflow Details - Clicking an Action run will take you to Github to view the Action Workflow + Clicking an Workflow run will take you to GitHub to view the Workflow `github_source` action IN("workflows.completed*") repo="*" | stats latest(conclusion) as status by org, actor, name, repo, head_branch, workflow_run_id $timeRng.earliest$ diff --git a/github_app_for_splunk/default/data/ui/views/secret_scanning_overview.xml b/github_app_for_splunk/default/data/ui/views/secret_scanning_overview.xml new file mode 100644 index 0000000..1cdf640 --- /dev/null +++ b/github_app_for_splunk/default/data/ui/views/secret_scanning_overview.xml @@ -0,0 +1,173 @@ +
+ + + + `github_webhooks` eventtype="GitHub::SecretScanning" | eval action='action', enterprise=if(isnotnull('enterprise.name'),'enterprise.name','unknown'), organization=if(isnotnull('organization.login'),'organization.login','unknown'), repository=if(isnotnull('repository.name'),'repository.name','unknown'), secret_type=if(isnotnull('alert.secret_type'),'alert.secret_type','unknown'), resolution=if(isnotnull('alert.resolution'),'alert.resolution','unknown'), create_time=if(isnotnull('alert.created_at'),'alert.created_at','unknown'), created=strptime(create_time, "%Y-%m-%dT%H:%M:%S%Z"), resolved_at=case('alert.dismissed_at' != "null", 'alert.dismissed_at', isnotnull('alert.fixed_at'), 'alert.fixed_at', isnotnull('alert.resolved_at'),'alert.resolved_at', 1=1, _time), duration = toString(round(strptime(resolved_at, "%Y-%m-%dT%H:%M:%S") - strptime(create_time, "%Y-%m-%dT%H:%M:%S"))), duration_str=tostring(avg(duration), "duration"),'alert.resolved_at','unknown'), resolved_by=if(isnotnull('alert.resolved_by.login'),'alert.resolved_by.login','unknown'), url='alert.html_url' + + $timeTkn.earliest$ + $timeTkn.latest$ + 1 + +
+ + + + -24h@h + now + + + + + secret_type + secret_type + " + " + + | table secret_type | dedup secret_type + + All + * + * + + + + All + * + * + " + " + , + organization + organization + + | dedup organization | table organization + + + + + All + * + * + " + " + , + repository + repository + + | dedup repository | table repository + + +
+ + + + Mean Time To Resolution (MTTR) + + | search repository=$repoTkn$ organization=$orgTkn$ secret_type=$secret_type$ action="resolved" + | eval age = avg(duration) + | appendpipe [ stats avg(age) as totalTime ] + | eval mttr = toString(round(totalTime), "duration"), clean_mttr = replace (mttr , "\+" , " days, ") + | stats max(clean_mttr) + + + + + + + + + + Found Secrets + + | search repository=$repoTkn$ organization=$orgTkn$ secret_type=$secret_type$ action="created" | stats count + + + + + + + + + + Fixed Secrets + + | search repository=$repoTkn$ organization=$orgTkn$ secret_type=$secret_type$ action="resolved" | stats count + + + + + + + + + + + Secrets by Type + + | search repository=$repoTkn$ organization=$orgTkn$ secret_type=$secret_type$ action="created" | chart count by secret_type + + + + + + + + + Secrets by Repository + + | search repository=$repoTkn$ organization=$orgTkn$ secret_type=$secret_type$ action="created" | chart count by repository + + + + + + + + + Secrets Found/Fixed Ratio + + | search repository=$repoTkn$ organization=$orgTkn$ secret_type=$secret_type$ (action=created OR action=resolved) +| timechart count(_raw) by action +| accum created +| accum resolved +| rename created as "Found" +| rename resolved as "Fixed" + + + + + + + + + + + +
+ Fixed Secrets + + | search action=resolved repository=$repoTkn$ organization=$orgTkn$ secret_type=$secret_type$ +| eval mttr = toString(round(duration), "duration"), clean_mttr = replace (mttr , "\+" , " days, ") +| table secret_type, organization, repository, resolution, resolved_by, clean_mttr +| rename secret_type as "Secret Type", organization as "Organization", repository as "Repository", resolution as "Resolution", resolved_by as "Resolved By", clean_mttr as "Time to Resolution" + + + + +
+
+
+ + + + Found Secrets + + | search action=created repository=$repoTkn$ organization=$orgTkn$ secret_type=$secret_type$ | table secret_type, organization, repository, url, create_time + | rename secret_type as "Secret Type", organization as "Organization", repository as "Repository", url as "URL", create_time as "Created At" + + + +
+
+
+ diff --git a/github_app_for_splunk/default/data/ui/views/security_alert_overview.xml b/github_app_for_splunk/default/data/ui/views/security_alert_overview.xml new file mode 100644 index 0000000..bb2742f --- /dev/null +++ b/github_app_for_splunk/default/data/ui/views/security_alert_overview.xml @@ -0,0 +1,261 @@ +
+ + + + `github_webhooks` alert.created_at=* + | eval type=case((eventtype="GitHub::CodeScanning"), "Code Scanning Alert", (eventtype="GitHub::VulnerabilityAlert"), "Dependabot Alert", (eventtype="GitHub::SecretScanning"), "Secret Scanning Alert") + | eval url=case((eventtype="GitHub::CodeScanning"), 'alert.html_url', (eventtype="GitHub::VulnerabilityAlert"), 'repository.html_url'+"/security/dependabot/"+'alert.number', (eventtype="GitHub::SecretScanning"), 'alert.html_url') + | eval reason=case((type="Dependabot Alert"),'alert.affected_package_name',(type="Code Scanning Alert"), 'alert.rule.name', (type="Secret Scanning Alert"), 'alert.secret_type'), id=case((type="Dependabot Alert"),'alert.external_identifier',(type="Code Scanning Alert"), 'alert.rule.id', (type="Secret Scanning Alert"), 'alert.number'), severity=case((type="Dependabot Alert"),'alert.severity',(type="Code Scanning Alert"), 'alert.rule.security_severity_level', (type="Secret Scanning Alert"), "high"), repository = 'repository.full_name' + | stats latest(action) as status, earliest(alert.created_at) as created_at, latest(alert.number) as number by repository, reason, id, type, severity, url + | eval source=type + | eval age = toString(round(now() - strptime(created_at, "%Y-%m-%dT%H:%M:%S")),"Duration") + | search severity IN("*") status IN("*") type IN("*") + | sort -age + + $timeTkn.earliest$ + $timeTkn.latest$ + 1 + +
+ + + + -30d@d + now + + + + + All + * + * + " + " + , + repository.name + repository.name + + `github_webhooks` alert.created_at=* | dedup repository.name | table repository.name + $timeTkn.earliest$ + $timeTkn.latest$ + + +
+ + + Open Alerts By Severity + + + | search status IN("create","created") | stats count by severity + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Open Alerts By Repository + + + | search status IN("create","created") | stats count by repository + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Open Alerts by Type + + + | search status IN("create","created") | stats count by type + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Resolved Alert Count + + + | search status IN("dismiss","resolve","resolved","closed_by_user","fixed")| stats count + + + + + + + + + + + + + + + + + + + + + + + + + Alert Details + + + All + * + * + + |dedup type| table type + + type + type + " + " + , + + + + All + * + * + + |dedup severity| table severity + + severity + severity + " + " + , + + + + All + * + * + + |dedup status| table status + + status + status + " + " + , + + + + |search severity IN($severityTkn$) status IN($statusTkn$) type IN($typeTkn$) | sort -age + + repository, reason, id, type,severity,status, created_at, age + + $row.url|n$ + + + + + + + + + + + {"critical":#DC4E41,"high":#F1813F,"moderate":#F8BE34, "medium":#F8BE34} + +
+
+
+
\ No newline at end of file diff --git a/github_app_for_splunk/default/data/ui/views/value_stream_analytics.xml b/github_app_for_splunk/default/data/ui/views/value_stream_analytics.xml index bfe10e7..acf1518 100644 --- a/github_app_for_splunk/default/data/ui/views/value_stream_analytics.xml +++ b/github_app_for_splunk/default/data/ui/views/value_stream_analytics.xml @@ -1,6 +1,13 @@ -
+ + + + `github_webhooks` (eventtype="GitHub::Issue" (action IN("opened","milestoned")) OR (action="labeled" AND label.name IN("in progress","to do"))) OR (eventtype="GitHub::Push" issueNumber=*) OR (eventtype="GitHub::PullRequest" action IN("opened","closed") issueNumber=*) repository.name IN("$repoTkn$") issueNumber!=9 | eval openTime=if(action=="opened",_time,NULL) | eval inProgressTime=if(action=="labeled",if('label.name'=="to do",_time,NULL),if(action=="milestoned",_time,NULL)) | eval workTime=if(action="labeled",if('label.name'="in progress",_time,NULL),if(eventtype=="GitHub::Push",_time,NULL)) | eval mergeTime=if(eventtype="GitHub::PullRequest",if(action=="opened",_time,NULL),if(eventtype="GitHub::Push",if(ref="refs/heads/main",_time,NULL),NULL)) | eval reviewTime=if(eventtype="GitHub::PullRequest",if('pull_request.merged'="true",_time,NULL),if(eventtype="GitHub::Push",if(ref="refs/heads/main",_time,NULL),NULL)) | eval sha=after | join type=left max=0 sha [ search `github_webhooks` eventtype="GitHub::Workflow" | eval sha='workflow_job.head_sha' | stats min(_time) as startTestTime, max(_time) as endTestTime by sha, workflow_job.id | eval testTimeDiff=endTestTime-startTestTime] | eval release='milestone.title' | join type=left release [search `github_webhooks` eventtype="GitHub::Release" | eval release='release.tag_name' | stats max(_time) as releaseTime by release] | stats max(issue.title) as issue.title, latest(milestone.title) as release, min(openTime) as opened, min(inProgressTime) as in_progress, min(workTime) as working , max(mergeTime) as merge, max(reviewTime) as review, avg(testTimeDiff) as avgTestDuration, max(endTestTime) as endTestTime, max(releaseTime) as releaseTime by repository.name,issueNumber | search opened=* + + $timeTkn.earliest$ + $timeTkn.latest$ + -
+
@@ -14,8 +21,8 @@ repository.name `github_webhooks` eventtype="GitHub::Push"|dedup repository.name| table repository.name - -30d@d - now + $timeTkn.earliest$ + $timeTkn.latest$ All * @@ -25,14 +32,38 @@ "
+ + + Total Time + + + | eval totalTimeDiff=releaseTime-opened | stats avg(totalTimeDiff) as totalTime | eval totalTime=toString(round(totalTime),"Duration") + + + + + + + + + + + + + + + + + + + + Step 1: Time to Accept - - `github_webhooks` (eventtype="GitHub::Issue" (action IN("opened","milestoned")) OR (action="labeled" AND label.name="in progress")) OR (eventtype="GitHub::Push" issueNumber=*) repository.name IN($repoTkn$) | eval openTime=if(action=="opened",_time,NULL) | eval inProgressTime=if(action=="labeled",_time,if(action="milestoned",_time,NULL)) | eval workTime=if(eventtype=="GitHub::Push",_time,NULL) | stats min(openTime) as opened, min(inProgressTime) as in_progress, min(workTime) as working by repository.name,issueNumber | search opened=* | eval acceptTimeDiff=in_progress-opened | eval workingTimeDiff=working-in_progress | search acceptTimeDiff=* | stats avg(acceptTimeDiff) as acceptTime | eval acceptTime=toString(round(acceptTime),"Duration") - $timeTkn.earliest$ - $timeTkn.latest$ + + | eval acceptTimeDiff=in_progress-opened | stats avg(acceptTimeDiff) as acceptTime | eval acceptTime=toString(round(acceptTime),"Duration") @@ -41,10 +72,50 @@ Step 2: Time to Work - - `github_webhooks` (eventtype="GitHub::Issue" (action IN("opened","milestoned")) OR (action="labeled" AND label.name="in progress")) OR (eventtype="GitHub::Push" issueNumber=*) repository.name IN($repoTkn$) | eval openTime=if(action=="opened",_time,NULL) | eval inProgressTime=if(action=="labeled",_time,if(action="milestoned",_time,NULL)) | eval workTime=if(eventtype=="GitHub::Push",_time,NULL) | stats min(openTime) as opened, min(inProgressTime) as in_progress, min(workTime) as working by repository.name,issueNumber | search opened=* | eval acceptTimeDiff=in_progress-opened | eval workingTimeDiff=working-in_progress | search workingTimeDiff>0 | stats avg(workingTimeDiff) as workingTime | eval workingTime=toString(round(workingTime),"Duration") - $timeTkn.earliest$ - $timeTkn.latest$ + + 
| eval workingTimeDiff=working-in_progress | search workingTimeDiff>0 | stats avg(workingTimeDiff) as workingTime | eval workingTime=toString(round(workingTime),"Duration") + + + + + + + Step 3: Time to Merge + + + | eval mergeTimeDiff=merge-working | stats avg(mergeTimeDiff) as mergeTime | eval mergeTime=toString(round(mergeTime),"Duration") + + + + + + + + + Step 4: Time to Review + + + | eval reviewTimeDiff=review-merge | stats avg(reviewTimeDiff) as reviewTime | eval reviewTime=toString(round(reviewTime),"Duration") + + + + + + + Step 5: Time to Test + + + | eval testTimeDiff=endTestTime-review | stats avg(testTimeDiff) as testTime | eval testTime=toString(round(testTime),"Duration") + + + + + + + Step 6: Time to Release + + + | eval releaseTimeDiff=releaseTime-endTestTime | stats avg(releaseTimeDiff) as releaseTime | eval releaseTime=toString(round(releaseTime),"Duration") @@ -54,11 +125,8 @@ - - `github_webhooks` repository.name IN($repoTkn$) (eventtype="GitHub::Issue" (action IN("opened","milestoned")) OR (action="labeled" AND label.name="in progress")) OR (eventtype="GitHub::Push" issueNumber=*) | eval openTime=if(action=="opened",_time,NULL) | eval inProgressTime=if(action=="labeled",_time,if(action="milestoned",_time,NULL)) | eval workTime=if(eventtype=="GitHub::Push",_time,NULL) | stats max(issue.title) as issue.title, min(openTime) as opened, min(inProgressTime) as in_progress, max(workTime) as working by repository.name,issueNumber | search opened=* | eval acceptTimeDiff=in_progress-opened | eval workingTimeDiff=working-in_progress | eval acceptTime=toString(round(acceptTimeDiff),"Duration"), workingTime=toString(round(workingTimeDiff),"Duration") | fields repository.name, issueNumber, issue.title, acceptTime, workingTime - $timeTkn.earliest$ - $timeTkn.latest$ - 1 + + | eval acceptTimeDiff=in_progress-opened | eval workingTimeDiff=working-in_progress | eval mergeTimeDiff=merge-working | eval reviewTimeDiff=review-merge | eval 
releaseTimeDiff=releaseTime-endTestTime | eval totalTimeDiff=releaseTime-opened | eval acceptTime=toString(round(acceptTimeDiff),"Duration"), workingTime=toString(round(workingTimeDiff),"Duration"), mergeTime=toString(round(mergeTimeDiff),"Duration"),reviewTime=toString(round(reviewTimeDiff),"Duration"), testTime=toString(round(endTestTime-review),"Duration"), testDuration=toString(round(avgTestDuration),"Duration"), releaseTime=toString(round(releaseTimeDiff),"Duration"), totalTime=toString(round(totalTimeDiff),"Duration") | eval opened=strftime(opened,"%m/%d/%Y %H:%M") | fields repository.name, issueNumber, issue.title, opened, acceptTime, workingTime, mergeTime, reviewTime, testTime, testDuration, releaseTime, totalTime @@ -71,4 +139,4 @@
- + \ No newline at end of file diff --git a/github_app_for_splunk/default/data/ui/views/webhook_config.xml b/github_app_for_splunk/default/data/ui/views/webhook_config.xml deleted file mode 100644 index e72e7c9..0000000 --- a/github_app_for_splunk/default/data/ui/views/webhook_config.xml +++ /dev/null @@ -1,71 +0,0 @@ - - - - - -

Using Github Webhooks

-

Github Webhooks are a great way to collect rich information as it occurs. You can easily enable webhooks within the Github UI and can even select specific actions on which to trigger a webhook call to Splunk. This is only available at the Organization level and will require this to be done for each Org as desired. To do so, you'll need to configure Splunk as a receiver and then set up the webhooks within Github.

-

Configuring Splunk to receive Webhooks

-

Splunk's HTTP Event Collector (HEC) is a quick and easy endpoint built to receive data from other producers like Github.

-

Steps -

    -
  1. Under Settings > Data Inputs, click HTTP Event Collector
  2. -
  3. Assuming HEC is enabled, click the New Token button
  4. -
  5. You can provide any name you want, however it is recommended to use something that will easily identify it like github_webhooks or similar based on your company's naming conventions, if they exist.
  6. -
  7. Unless required by your Splunk administrator, the rest of this page can be left as is; continue on to the next step.
  8. -
  9. You'll want to click select for Source Type, and a new selection box will appear below that.
  10. -
  11. Under the Application option, there should be an entry for github_json, however you may need to use the little search bar to find it.
  12. -
  13. For App Context, you'll want to select Splunk App for Github
  14. -
  15. Next select the index created for this data. If none exist, create a new Index. Names like github or the like are recommended, depending on corporate naming conventions.
  16. -
  17. Lastly, click the Review button and confirm the data is correct and hit Submit.
  18. -

-

Your token is now available to collect data, however we'll need to enable that token to allow Query String Authentication using that token. For this, you'll need command line access to your Splunk environment or be using a deployment server to deploy apps to Splunk.

-

To enable Query String Authentication, you'll need to update the inputs.conf file within the Splunk App for Github local directory. In that file, there will be a stanza with the name and value of the token you created. At the end of that stanza, you'll need to add allowQueryStringAuth = true and then restart Splunk. This is best done with the help of your Splunk team, so please reach out to them for assistance on this step.

-

Setting Up Github Webhooks

-

Webhooks are a simple push mechanism that will send an event each time the webhook is triggered. Unfortunately, Webhooks are unique to each Organization and will need to be setup for each Org as desired. To do this, a user will need to be an Admin for the Org.

-

Steps

-
    -
  1. In your Organization Settings page, select Webhooks from the menu on the left.
  2. -
  3. On this page, you'll see all the existing Webhooks, click the Add webhook button to add one to send data to Splunk
  4. -
  5. The Payload URL will be the Splunk HTTP Event Collector endpoint that was enabled above. It should look something like: https://YOUR SPLUNK URL:8088/services/collector/raw?token=THE TOKEN FROM ABOVE. The port here of 8088 may be different for your Splunk Environment, so please confirm the HEC port with your Splunk Admin team.
  6. -
  7. For Content Type, you'll want to select application/json as the best option.
  8. -
  9. You can choose to send just push events, All events, or manually select specific events from the list available. However, only some events have related Splunk eventtypes available to differentiate them within Splunk. See the table of available eventtypes below.
  10. -
  11. Once you click Add Webhook, a sample event will be triggered and its status and response from the HTTP Event Collector should show below. Confirm that the response is OK. Otherwise triage as needed based on the HTTP Response provided.
  12. -
-

Once that is complete and webhooks are triggering, you'll want to update the macro used for Webhook based dashboards. To do this:

-
    -
  1. In Splunk, under Settings > Advanced Search, you'll see an entry for Macros, click that.
  2. -
  3. There is a macro called github_webhooks, you'll need to update it to specify the Index used by the HTTP Event Collector token created earlier. Once saved, any dashboards that report on Webhook events should automatically start displaying data.
  4. -
-

Available Webhook Eventtypes

- - - - - - - - - - - - - - - - - - - - - - - - - - -
Splunk EventtypeGithub Webhook EventDescription
Github::RepoRepositoriesRepository created, deleted, archived, unarchived, publicized, privatized, edited, renamed, or transferred.
Github::PushPushesGit push to a repository.
Github::PullRequestPull requestsPull request opened, closed, reopened, edited, assigned, unassigned, review requested, review request removed, labeled, unlabeled, synchronized, ready for review, converted to draft, locked, unlocked, auto merge enabled, auto merge disabled, milestoned, or demilestoned.
Github::PullRequest::ReviewPull request reviewsPull request review submitted, edited, or dismissed.
- -
-
-
diff --git a/github_app_for_splunk/default/data/ui/views/welcome_page.xml b/github_app_for_splunk/default/data/ui/views/welcome_page.xml new file mode 100644 index 0000000..84dcbe6 --- /dev/null +++ b/github_app_for_splunk/default/data/ui/views/welcome_page.xml @@ -0,0 +1,98 @@ + + + + + + +
+

Welcome to the GitHub App for Splunk!

+

+ This Splunk app is meant to be your single pane of glass for anything GitHub. Whether you're looking for audit log analytics, GitHub Enterprise Server monitoring, or other GitHub metrics, you're in the right place! +

+
+ +
+
+ + + + +
+

GitHub Enterprise Server

+

+ GitHub Enterprise Server users have several different parts of the App that may be helpful. +

    +
  1. The "Enterprise Server Monitor" drop down has several dashboards that report on the health and performance of your GHES environment
  2. +
  3. "Audit" drop down works for GHES as well as GitHub.com audit logs
  4. +
  5. The Alerts menu item contains all GitHub recommended alerts
  6. +
  7. Audit Log Activity provides a high-level overview of what activity is going on in GitHub.
  8. +
  9. Repository Audit lets you review changes to individual or groups of repositories
  10. +
  11. User Change Audit is the best place to review actions taken by or made to individual users.
  12. +
+

+
+ +
+ + + +
+

GitHub Advanced Security

+

+ Open Source repositories and customers of GitHub Advanced Security have access to application security tooling such as Code Scanning, Secret Scanning, and Dependency Review. +

    +
  1. The Advanced Security Overview dashboard gives insight into the security posture of your GitHub Organization
  2. +
  3. The Code Scanning Alerts dashboard gives you access to alerts created by Code Scanning within your Organization
  4. +
  5. The Secret Scanning Alerts dashboard provides visibility into secrets like API keys and personal access tokens that have been checked into your repositories
  6. +
+

+
+ +
+
+ + + + +
+

How to collect GitHub Data

+

+ GitHub has several ways to collect data from their services depending on your needs. +

    +
  1. Audit Log data is available through a Splunk Add-On
  2. +
  3. Rich commit, pull request, and Code Scanning data is available through GitHub Webhooks
  4. +
+

+
+ +
+
+
diff --git a/github_app_for_splunk/default/data/ui/views/workflow_analysis.xml b/github_app_for_splunk/default/data/ui/views/workflow_analysis.xml new file mode 100644 index 0000000..2638fe0 --- /dev/null +++ b/github_app_for_splunk/default/data/ui/views/workflow_analysis.xml @@ -0,0 +1,84 @@ +
+ +
+ + + + -24h@h + now + + + + + repository.name + repository.name + + `github_webhooks` eventtype="GitHub::Workflow"|dedup repository.name| table repository.name + $timeTkn.earliest$ + $timeTkn.latest$ + + All + * + * + +
+ + + + Workflow Conclusions Over Time + + `github_webhooks` "workflow_run.name"="*" | spath "repository.full_name" | search repository.full_name="$repos$" | stats latest(_time) as _time, latest(workflow_run.conclusion) as workflow_run.conclusion by repository.full_name,workflow_run.name,workflow_run.id | timechart count by workflow_run.conclusion span=1h | rename null as "in-progress" + $timeTkn.earliest$ + $timeTkn.latest$ + 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+

Workflow History

+
+
+ +
+
+
diff --git a/github_app_for_splunk/default/data/ui/views/workflow_analytics.xml b/github_app_for_splunk/default/data/ui/views/workflow_analytics.xml new file mode 100644 index 0000000..3890639 --- /dev/null +++ b/github_app_for_splunk/default/data/ui/views/workflow_analytics.xml @@ -0,0 +1,79 @@ +
+ +
+ + + + -30d@d + now + + + + + repository.name + repository.name + + `github_webhooks` eventtype="GitHub::Workflow"|dedup repository.name| table repository.name + $timeTkn.earliest$ + $timeTkn.latest$ + + All + * + * + , + " + " + +
+ + + Average Workflow Overview + + + `github_webhooks` eventtype="GitHub::Workflow" repository.name IN(""*"") | eval queued=if(action="requested",_time,NULL), completed=if(action="completed",_time,NULL) | stats min(queued) as queued, min(completed) as completed by repository.name,workflow_run.name,workflow_run.id | eval totalTime=completed-queued | fields repository.name,workflow_run.name, workflow_run.id, totalTime | stats avg(totalTime) as totalTime | eval totalTime=toString(round(totalTime),"Duration") + $timeTkn.earliest$ + $timeTkn.latest$ + 1 + + + + + + + + + + + + + + + + + + + + + + + + Workflow Analytics by Job Name + + + `github_webhooks` eventtype="GitHub::Workflow" repository.name IN(""*"") | eval queued=if(action="requested",_time,NULL),completed=if(action="completed",_time,NULL) | stats min(queued) as queued, min(completed) as completed by repository.full_name,workflow_run.name,workflow_run.id | eval totalTime=completed-queued | fields repository.full_name,workflow_run.name, workflow_run.id, totalTime | stats avg(totalTime) as totalTime by repository.full_name,workflow_run.name | eval totalTime=toString(round(totalTime),"Duration") + $timeTkn.earliest$ + $timeTkn.latest$ + 1 + + + + + + + + + +
+
+
+
\ No newline at end of file diff --git a/github_app_for_splunk/default/data/ui/views/workflow_details.xml b/github_app_for_splunk/default/data/ui/views/workflow_details.xml new file mode 100644 index 0000000..846a3a3 --- /dev/null +++ b/github_app_for_splunk/default/data/ui/views/workflow_details.xml @@ -0,0 +1,214 @@ +
+ + + + `github_webhooks` "workflow_run.name"="$workflowName$" | fields * | spath "repository.full_name" + + $timeRange.earliest$ + $timeRange.latest$ + +
+ + + * + + + + * + + + + + + + + -24h@h + now + + + + + 25 + 10 + 25 + 50 + 100 + 250 + 25 + +
+ + + + + + + + + + Build Duration History + + + | eval started=if(action=="requested",_time, NULL), ended=if(action=="completed", _time, NULL) | stats latest(_time) as _time, latest(workflow_run.conclusion) as conclusion, earliest(started) as started, latest(ended) as ended by workflow_run.name, workflow_run.id, repository.full_name | eval duration=ended-started, queued=started-queued | table workflow_run.id, _time, duration | sort -_time | head 25 | sort _time | fields - _time + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Build Status History + + + | eval failed=if('workflow_run.conclusion'=="failure",1,0), successful=if('workflow_run.conclusion'=="success",1,0) | stats latest(_time) as _time, max(successful) as successful, max(failed) as failed, latest(workflow_run.conclusion) as conclusion by repository.full_name,workflow_run.name,workflow_run.id | table _time, workflow_run.id, successful,failed | sort -_time | head $workflowCount$ | sort _time | fields - _time + + + + + + + + + + + Build Status Overview + + + | stats latest(_time) as _time, latest(workflow_run.conclusion) as conclusion by repository.full_name,workflow_run.name,workflow_run.id | sort -_time | head $workflowCount$ | sort _time | fields - _time | stats count by conclusion + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Workflow Information + + `github_webhooks` (workflow_run.id=$workflow_id$ OR workflow_job.run_id=$workflow_id$) | eval started=if(action=="requested", _time, null), completed=if(action=="completed", _time,null) | stats latest(workflow_run.name) as WorkflowName, latest(workflow_run.id) as WorkflowID, latest(workflow_run.conclusion) as Status, latest(repository.full_name) as RepositoryName,earliest(started) as Started, latest(completed) as Completed, latest(workflow_run.head_branch) as Branch, latest(workflow_run.event) as Trigger, latest(workflow_run.run_number) as RunNumber, latest(workflow_run.run_attempt) as Attempt, 
latest(workflow_run.html_url) as URL | eval Duration=tostring(Completed-Started, "Duration"), Completed=strftime(Completed,"%Y-%m-%dT%H:%M:%S"), Started=strftime(Started,"%Y-%m-%dT%H:%M:%S") | fields WorkflowName, RepositoryName, Status, WorkflowID, RunNumber, Attempt, Started, Completed, Duration, Branch, Trigger, URL|transpose|rename column AS Details| rename "row 1" AS values + 0 + + 1 + + + + + + + + + + https://github.com/$repoName|n$/actions/runs/$workflow_id$ + +
+
+
+ + + + + + + +
+

Workflow Jobs

+
+
+ +
+
+ + + Workflow Run Logs + + + `github_workflow_logs` workflowID::$workflow_id$ | sort _time + 0 + 1 + + + + + + + + + + + + + + +
diff --git a/github_app_for_splunk/default/distsearch.conf b/github_app_for_splunk/default/distsearch.conf new file mode 100644 index 0000000..8683077 --- /dev/null +++ b/github_app_for_splunk/default/distsearch.conf @@ -0,0 +1,2 @@ +[replicationSettings:refineConf] +replicate.macros = true diff --git a/github_app_for_splunk/default/eventtypes.conf b/github_app_for_splunk/default/eventtypes.conf index 8048153..47e3b42 100644 --- a/github_app_for_splunk/default/eventtypes.conf +++ b/github_app_for_splunk/default/eventtypes.conf @@ -1,9 +1,30 @@ +[GitHub::Branch] +search = `github_webhooks` ref_type=branch + +[GitHub::Change] +search = `github_source` action=* sourcetype="github:enterprise:audit" OR sourcetype="github_audit" + +[GitHub::CodeScanning] +search = `github_webhooks` action IN ("appeared_in_branch", "closed_by_user", "created", "fixed", "reopened", "reopened_by_user") "commit_oid"=* + +[GitHub::CodeVulnerability] +search = `github_webhooks` (eventtype="GitHub::CodeScanning") "alert.html_url"="*/security/code-scanning/*" + [GitHub::Issue] search = `github_webhooks` action IN ("opened","edited","deleted","pinned","unpinned","closed","reopened","assigned","unassigned","labeled","unlabeled","locked","unlocked","transferred","milestoned","demilestoned") "issue.number"=* NOT "comment.body"=* [GitHub::Issue::Comment] search = `github_webhooks` action IN ("created","edited","deleted") "issue.number"=* "comment.body"=* +[GitHub::Project] +search = `github_webhooks` action IN ("created","edited","closed","reopenend","deleted") "project.number"=* + +[GitHub::Project::Card] +search = `github_webhooks` action IN ("created","edited","moved","converted","deleted") "project_card.id"=* + +[GitHub::Project::Column] +search = `github_webhooks` action IN ("created","edited","moved","deleted") "project_column.id"=* + [GitHub::PullRequest] search = `github_webhooks` action IN 
("opened","edited","closed","assigned","unassigned","review_requested","review_request_removed","ready_for_review","converted_to_draft","labeled","unlabeled","synchronize","auto_merge_enabled","auto_merge_disabled","locked","unlocked","reopened") number=* "pull_request.id"=* @@ -13,14 +34,36 @@ search = `github_webhooks` action IN ("submitted","edited","dismissed") pull_req [GitHub::Push] search = `github_webhooks` after=* before=* "commits{}.id"=* ref=* "pusher.name"=* +[GitHub::Release] +search = `github_webhooks` action IN ("released","published", "created", "prereleased") release.id=* + +[GitHub::Release::Push] +color = et_blue +search = `github_webhooks` after=* before=* ref=refs/tags* + [GitHub::Repo] -search = `github_webhooks` action IN ("created","deleted","archived","unarchived","edited","renamed","transferred","publicized","privatized") "repository.name"=* NOT "pull_request.id"=* NOT "project_card.id"=* NOT "project.number"=* NOT "project_column.id"=* +search = `github_webhooks` action IN ("created","deleted","archived","unarchived","edited","renamed","transferred","publicized","privatized") "repository.name"=* NOT "pull_request.id"=* NOT "project_card.id"=* NOT "project.number"=* NOT "project_column.id"=* NOT "check_run.id"=* NOT "alert.created_at"=* NOT "alert.number"=* -[GitHub::Project] -search = `github_webhooks` action IN ("created","edited","closed","reopenend","deleted") "project.number"=* +[GitHub::SecretScanning] +search = `github_webhooks` action IN ("created", "resolved") "alert.secret_type"=* -[GitHub::Project::Card] -search = `github_webhooks` action IN ("created","edited","moved","converted","deleted") "project_card.id"=* +[GitHub::VulnerabilityAlert] +search = `github_webhooks` action IN ("create", "dismiss", "resolve") "alert.external_identifier"=* affected_package_name=* -[GitHub::Project::Column] -search = `github_webhooks` action IN ("created","edited","moved","deleted") "project_column.id"=* +[GitHub::Workflow] +search = 
`github_webhooks` workflow.id=* action IN("requested","completed") + +[GitHub::Workflow::Job] +search = `github_webhooks` workflow_job.id=* action IN("queued","in_progress","completed") + +[github:enterprise:authentication] +search = `github_source` sourcetype=GithubEnterpriseServerAuditLog app=* authentication_service=* signature=* + +[github_fork] +search = `github_json` is_fork="true" src_user_type=User + +[github_json_change_events] +search = index=github action=* repo=* + +[github_workflow_change] +search = index=github (workflow_run.event=* workflow_run.name=* workflow_run.head_commit.author.name=* workflow_run.head_repository.full_name=*) diff --git a/github_app_for_splunk/default/macros.conf b/github_app_for_splunk/default/macros.conf index 8b8357a..7bb5799 100644 --- a/github_app_for_splunk/default/macros.conf +++ b/github_app_for_splunk/default/macros.conf @@ -3,11 +3,47 @@ definition = index=github_collectd iseval = 0 +[github_json] +definition = index="github" sourcetype=github_json +iseval = 0 + [github_source] -definition = (index="github" source="ghe_audit_log_monitoring://*") OR (index=ghes source=github_audit) +definition = index="github" iseval = 0 [github_webhooks] definition = index=github_webhook iseval = 0 +[github_workflow_logs] +definition = index="github_workflow_logs" +iseval = 0 + +[devops_indexes] +definition = index="github_webhook" OR index="github_webhook2" OR index="github" +iseval = 0 + +[individual_commits] +definition = | spath commits{} output=commits \ +| mvexpand commits \ +| rex field=commits "(?<=\"id\"\:\")(?\w*)"\ +| rex field=commits "(?<=\"message\"\:\")(?(\w|\s)*)"\ +| rex field=commits "(?<=\"username\"\:\")(?(\w|-)*(?=\"))"\ +| rex field=commits "(?<=\"timestamp\"\:\")(?[^\"]*(?=\"))"\ +| rex field=commits "(?<=\"added\"\:\[)(?[^\]]*(?=\]))"\ +| rex field=commits "(?<=\"removed\"\:\[)(?[^\]]*(?=\]))"\ +| rex field=commits "(?<=\"modified\"\:\[)(?[^\]]*(?=\]))" +iseval = 0 + +[extract_branch_issuenumber] +definition = | 
eval branch = if(('ref_type'=="branch" AND 'ref'!=""), 'ref', "") \ +| eval ref = if((isnull('ref') AND isnotnull('pull_request.head.ref') AND ('eventtype'=="GitHub::PullRequest" OR 'eventtype'=="GitHub::PullRequest::Review")), 'pull_request.head.ref', if((isnull('ref') AND isnotnull('pull_request.base.ref') AND ('eventtype'=="GitHub::PullRequest" OR 'eventtype'=="GitHub::PullRequest::Review")), 'pull_request.base.ref', 'ref'))\ +| rex field="ref" "(?(?<=refs\/heads\/).*)" \ +| eval commit_branch = if((isnull('commit_branch') AND isnotnull('pull_request.head.ref') AND ('eventtype'=="GitHub::PullRequest" OR 'eventtype'=="GitHub::PullRequest::Review")), 'pull_request.head.ref', if((isnull('commit_branch') AND isnotnull('pull_request.base.ref') AND ('eventtype'=="GitHub::PullRequest" OR 'eventtype'=="GitHub::PullRequest::Review")), 'pull_request.base.ref', if((isnull('commit_branch') AND isnotnull('ref')), 'ref', 'commit_branch')))\ +| rex field="commit_branch" "(?^\d*)" +iseval = 0 + +[extract_release_push_tags] +definition = | eval ref_tags = if((isnotnull('ref') AND eventtype="GitHub::Release::Push"), ref, null())\ +| rex field="ref_tags" "(?(?<=refs\/tags\/).*)" +iseval = 0 diff --git a/github_app_for_splunk/default/props.conf b/github_app_for_splunk/default/props.conf index 1b77c17..d3cde4d 100644 --- a/github_app_for_splunk/default/props.conf +++ b/github_app_for_splunk/default/props.conf @@ -1,17 +1,35 @@ [default] -[GithubEnterpriseServerAuditLog] + +[GithubEnterpriseServerLog] +# Basic settings DATETIME_CONFIG = LINE_BREAKER = ([\r\n]+) NO_BINARY_CHECK = true category = Application pulldown_type = true +TIME_FORMAT = +TZ = +#Calculated Fields EXTRACT-audit_event = github_audit\[\d+\]\:\s(?.*) EXTRACT-audit_fields = \"(?<_KEY_1>.*?)\"\:\"*(?<_VAL_1>.*?)\"*, -EXTRACT-github_log_type = \d+\:\d+\:\d+\s\d+\-\d+\-\d+\-\d+\s(?.*?)\: +EXTRACT-github_log_type = \d+\:\d+\:\d+\s[\d\w\-]+\s(?.*?)\: EXTRACT-github_document_id = \"_document_id\"\:\"(?.*?)\" 
FIELDALIAS-source = github_log_type AS source +FIELDALIAS-user = actor AS user + +[GithubEnterpriseServerAuditLog] +#Calculated Fields +EVAL-action = "success" +EVAL-signature = "Login by " + src_user + " to " + authentication_service + " service" +EVAL-src = replace(source_host, "\-", ".") +EVAL-user = if(isnotnull(src_user), user, if(isnotnull(user), user, NULL)) +# Field Extractions +EXTRACT-source,app,authentication_service,authentication_method,path,user,service = \<\d+\>\w+\s\d+\s\d+:\d+:\d+ (?\S+)\s+(?[^:]+)+:\s+(?\S+) : TTY=(?\S+) ; PWD=(?\S+) ; USER=(?\S+) ; COMMAND=(?.*) +# Field Aliases +FIELDALIAS-user = actor AS user [collectd_github] +# Basic settings ADD_EXTRA_TIME_FIELDS = false ANNOTATE_PUNCT = false BREAK_ONLY_BEFORE_DATE = @@ -23,26 +41,163 @@ NO_BINARY_CHECK = true SHOULD_LINEMERGE = false category = Metrics description = Collectd daemon format. Uses the write_http plugin to send metrics data to a Splunk platform data input via the HTTP Event Collector. -disabled = false pulldown_type = 1 [github_json] +# Basic settings +TRUNCATE = 100000 +KV_MODE = json +pulldown_type = true DATETIME_CONFIG = LINE_BREAKER = ([\r\n]+) -NO_BINARY_CHECK = true -TRUNCATE = 250000 -category = Application -pulldown_type = 1 +SHOULD_LINEMERGE = false +#Calculated Fields +EVAL-action = if(isnotnull('action'), 'action', null()) +EVAL-asset_content_type = if(isnotnull('release.assets{}.content_type'), 'release.assets{}.content_type', null()) +EVAL-asset_name = if(isnotnull('release.assets{}.name'), 'release.assets{}.name', null()) +EVAL-asset_uploader_login = if(isnotnull('release.assets{}.uploader.login'), 'release.assets{}.uploader.login', null()) +EVAL-assigned_reviewers = if(isnotnull('pull_request.requested_reviewers{}.login'), 'pull_request.requested_reviewers{}.login', null()) +EVAL-assigned_user = if(isnotnull('issue.assignee.login'), 'issue.assignee.login', 'assignee.login') +EVAL-attempt_number = if(isnotnull('workflow_run.run_attempt'), 
'workflow_run.run_attempt',null()) +EVAL-branch = if(('ref_type'=="branch" AND 'ref'!=""), 'ref', if(isnotnull('commit_branch'), 'ref', null())) +EVAL-body = "Secrete Leakage: ".'alert.secret_type' +EVAL-category = if(isnotnull(alert_description), "code", if(isnotnull(affected_package_name), "dependency", if(isnotnull(secret_type), "secret", ""))) +EVAL-closed_date = if(isnotnull('issue.closed_at'), 'issue.closed_at', null()) +EVAL-commit_branch = if((isnull('commit_branch') AND isnotnull('pull_request.head.ref') AND ('eventtype'=="GitHub::PullRequest" OR 'eventtype'=="GitHub::PullRequest::Review")), 'pull_request.head.ref', if((isnull('commit_branch') AND isnotnull('pull_request.base.ref') AND ('eventtype'=="GitHub::PullRequest" OR 'eventtype'=="GitHub::PullRequest::Review")), 'pull_request.base.ref', if((isnull('commit_branch') AND isnotnull('ref')), 'ref', 'commit_branch'))) +EVAL-commit_files_added = if(isnotnull('commits{}.added{}'), 'commits{}.added{}', null()) +EVAL-commit_files_modified = if(isnotnull('commits{}.modified{}'), 'commits{}.modified{}', null()) +EVAL-commit_files_removed = if(isnotnull('commits{}.removed{}'), 'commits{}.removed{}', null()) +EVAL-commit_hash = if(isnotnull('commits{}.id'), 'commits{}.id', null()) +EVAL-commit_message = if(isnotnull('commits{}.message'), 'commits{}.message', null()) +EVAL-commit_timestamp = if(isnotnull('commits{}.timestamp'), 'commits{}.timestamp', null()) +EVAL-commit_username = if(isnotnull('commits{}.author.username'), 'commits{}.author.username', null()) +EVAL-commits_author_list = if(isnotnull('commits{}.author.username'), 'commits{}.author.username', null()) +EVAL-commits_list = if(isnotnull('commits{}.id'), 'commits{}.id', null()) +EVAL-commits_message_list = if(isnotnull('commits{}.message'), 'commits{}.message', null()) +EVAL-commits_timestamp_list = if(isnotnull('commits{}.timestamp'), 'commits{}.timestamp', null()) +EVAL-completed = if(action="completed",_time, NULL) +EVAL-current_priority = 
if('issue.labels{}.name' like "Priority%", mvfilter(match('issue.labels{}.name', "[pP]riority:\sLow|[pP]riority:\sHigh|[pP]riority:\sMedium")), null()) +EVAL-current_push = if(isnotnull('after'), 'after', null()) +EVAL-description = "Secrete Leakage: ".'alert.secret_type' +EVAL-dest = "((repo)|(full_name))":"(?[^/]+) +EVAL-dvc = replace(host, ":\d+", "") +EVAL-earliest_commit_author_user = if(isnotnull(mvindex('commits{}.author.username', 0)), mvindex('commits{}.author.username', 0) , null()) +EVAL-earliest_commit_date = if((isnotnull('commits{}.id') AND isnull('commit_timestamp')), 'head_commit.timestamp', if((isnotnull('commits{}.id') AND isnotnull('commit_timestamp')), 'commit_timestamp', "")) +EVAL-earliest_commit_hash = if(isnotnull(mvindex('commits{}.id', 0)), mvindex('commits{}.id', 0) , null()) +EVAL-earliest_commit_message = if(isnotnull(mvindex('commits{}.message', 0)), mvindex('commits{}.message', 0) , null()) +EVAL-files_added = if(isnotnull('commits{}.added{}'), 'commits{}.added{}', null()) +EVAL-files_modified = if(isnotnull('commits{}.modified{}'), 'commits{}.modified{}', null()) +EVAL-files_removed = if(isnotnull('commits{}.removed{}' ), 'commits{}.removed{}' , null()) +EVAL-id = organization."/".repository_name."/".'alert.number' +EVAL-issue_assignees = if('issue.assignees{}.login'!="", 'issue.assignees{}.login', null) +EVAL-issue_assigned_date = if("issue.updated_at"!="" AND action="assigned", 'issue.updated_at', null()) +EVAL-issue_description = if(isnotnull('issue.body'), 'issue.body', null()) +EVAL-issue_href = if(isnotnull('issue.html_url'), 'issue.html_url', null()) +EVAL-issue_subject = if(isnotnull('issue.title'), 'issue.title', null()) +EVAL-issue_tags = if(isnotnull('issue.labels{}.name'), 'issue.labels{}.name', null()) +EVAL-issueNumber = if(isnotnull('issue.number'), 'issue.number', 'issueNumber') +EVAL-last_updated = if("issue.update_at"="*", 'issue.update_at', strftime(_time,"%Y-%m-%d %H:%M:%S")) +EVAL-latest_commit_author_user = 
if((isnotnull('commits{}.id') AND isnull('commit_username')), 'head_commit.author.username', if((isnotnull('commits{}.id') AND isnotnull('commit_username')), 'commit_username', "")) +EVAL-latest_commit_date = if((isnotnull('commits{}.id') AND isnull('commit_timestamp')), 'head_commit.timestamp', if((isnotnull('commits{}.id') AND isnotnull('commit_timestamp')), 'commit_timestamp', "")) +EVAL-latest_commit_hash = if((isnotnull('commits{}.id') AND isnull('commit_hash')), 'head_commit.id', if((isnotnull('commits{}.id') AND isnotnull('commit_hash')), 'commit_hash', if(isnotnull(after), after, null()))) +EVAL-latest_commit_message = if((isnotnull('commits{}.id') AND isnull('commit_message')), 'head_commit.message', if((isnotnull('commits{}.id') AND isnotnull('commit_message')), 'commit_message', "")) +EVAL-name = if(isnotnull('workflow_job.name'), 'workflow_job.name',if(isnotnull('workflow_run.name'), 'workflow_run.name',null())) +EVAL-object_attrs = "branch:" + pull_request_title + "|business:" + business +EVAL-object_category = if(isnotnull(workflow_run.event), "workflow", if(isnotnull(repo), "repository", "")) +EVAL-organization_name = if(isnotnull('organization.login'), 'organization.login', null()) +EVAL-pipeline_id = if(isnotnull('workflow.id'), 'workflow.id', if(isnotnull('workflow_job.id'), 'workflow_job.id', null())) +EVAL-pr_author_login = if(isnotnull('sender.login'), 'sender.login', null()) +EVAL-pr_created_date = if(isnotnull('pull_request.created_at'), 'pull_request.created_at', null()) +EVAL-pr_id = if((isnotnull('pull_request.number')), 'pull_request.number', if((isnotnull('number')), 'number', null())) +EVAL-pr_message = if(isnotnull('pull_request.body'), 'pull_request.body', null()) +EVAL-previous_push = if(isnotnull('before'), 'before', null()) +EVAL-pullrequest_base_sha = 'pull_request.base.sha' +EVAL-pullrequest_base_user_login = 'pull_request.base.user.login' +EVAL-pull_request_merged = if(isnotnull('pull_request.merged'), 'pull_request.merged', 
null()) +EVAL-pull_request_merged_at = if(isnotnull('pull_request.merged_at'), 'pull_request.merged_at', null()) +EVAL-ref = if((isnull('ref') AND isnotnull('pull_request.head.ref') AND ('eventtype'=="GitHub::PullRequest" OR 'eventtype'=="GitHub::PullRequest::Review")), 'pull_request.head.ref', if((isnull('ref') AND isnotnull('pull_request.base.ref') AND ('eventtype'=="GitHub::PullRequest" OR 'eventtype'=="GitHub::PullRequest::Review")), 'pull_request.base.ref', 'ref')) +EVAL-ref_tags = if((isnotnull('ref') AND eventtype="GitHub::Release::Push"), ref, null()) +EVAL-release_author = if(isnotnull('release.author.login'), 'release.author.login', null()) +EVAL-release_created_at = if(isnotnull('release.created_at'), 'release.created_at', null()) +EVAL-release_name = if(isnotnull('release.name'), 'release.name', null()) +EVAL-release_status = if(isnotnull('action'), 'action', null()) +EVAL-release_sender_name = if(isnotnull('sender.login'), 'sender.login', null()) +EVAL-release_tags = if(isnotnull('release.tag_name'), 'release.tag_name', if(isnotnull('release_tags'), release_tags, "beep")) +EVAL-release_url = if(isnotnull('release.url'), 'release.url', null()) +EVAL-repository_name = if(isnotnull('repository.name'), 'repository.name', null()) +EVAL-repository_organization = if(isnotnull('organization.login'), 'organization.login', null()) +EVAL-result = "success" +EVAL-review_author_login = if(isnotnull('review.user.login'), 'review.user.login', null()) +EVAL-review_state = if(isnotnull('review.state'), 'review.state', null()) +EVAL-run_id = if(isnotnull('workflow_job.run_id'), 'workflow_job.run_id', if(isnotnull('workflow_run.id'), 'workflow_run.id', null())) +EVAL-run_number = if(isnotnull('workflow_run.run_number'), 'workflow_run.run_number', null()) +EVAL-severity = if(isnotnull(secret_type),"critical",severity) +EVAL-severity_id = CASE(severity=="critical",4, severity_level=="critical",4, severity=="high",3, severity_level=="high",3, 
severity=="moderate",2,severity_level=="moderate", 2, isnotnull(secret_type),4, true=true, 1) +EVAL-signature = CASE(isnull(alert_description), UPPER(severity) + " Dependency Vulnerability on package " + affected_package_name, 1=1, alert_description) +EVAL-started = if(action="requested",_time, if(isnotnull('workflow_run.run_started_at'),round(strptime('workflow_run.run_started_at', "%Y-%m-%dT%H:%M:%SZ"),0), if(isnotnull('workflow_job.started_at'), round(strptime('workflow_job.started_at', "%Y-%m-%dT%H:%M:%SZ"),0), null()))) +EVAL-started_by_id = if(isnotnull('sender.login'), 'sender.login', null()) +EVAL-started_by_name = if(isnotnull('sender.login'), 'sender.login', null()) +EVAL-status = if(isnotnull('workflow_job.status'), 'workflow_job.status', if(isnotnull('workflow_run.status'), 'workflow_run.status', null())) +EVAL-status_update_date = if(('action'!="" AND isnotnull('issue.updated_at')), 'issue.updated_at', null()) +EVAL-status_current = if(action=="deleted", "deleted", 'issue.state') +EVAL-submitter_user = if(isnotnull('issue.user.login'), 'issue.user.login', null()) +EVAL-submission_date = if(isnotnull('issue.created_at'), 'issue.created_at', null()) +EVAL-user = case(isnotnull(user),user,isnotnull(user1),user1,isnotnull(user2),user2,isnotnull(user3),user3,isnotnull(user4),user4,1==1,"unknown") +EVAL-vendor_product = "github" +EVAL-xref = if(isnotnull(affected_package_name), affected_package_name, alert_location_path) +# Field Extractions +EXTRACT-change_type = "action":"(?[^\.]+).*","((actor)|(workflow)|(_document)) +EXTRACT-commit_branch = (?(?<=refs\/heads\/)[\-\w\d\s]*) +EXTRACT-commit_hash = | spath commits{} output=commits | mvexpand commits | rex field=commits "(?<=\"id\"\:\")(?\w*)" +EXTRACT-release_tags = "ref":"refs\/tags\/(?[0-9|aA-zZ.]*)" +EXTRACT-object = "repo":".+/{1}(?[^"]+)", +# Field Aliases +FIELDALIAS-dependabot = "alert.affected_package_name" AS affected_package_name "alert.external_identifier" AS cve "alert.external_reference" AS url 
"alert.most_recent_instance.location.path" AS alert_location_path "alert.rule.description" AS alert_description "alert.rule.security_severity_level" AS severity_level "alert.severity" AS severity eventtype AS vendor_product "repository.owner.login" AS user3 +FIELDALIAS-RepoAlias = "organization.login" ASNEW organization "repository.name" ASNEW repository_name +FIELDALIAS-secret = "alert.html_url" AS url "alert.secret_type" AS secret_type "repository.owner.login" AS user4 +FIELDALIAS-user = actor AS user1 +FIELDALIAS-workflow_changes = action ASNEW command actor_ip ASNEW src document_id ASNEW object_id pull_request_url ASNEW object_path "workflow_run.event" ASNEW command "workflow_run.head_branch" ASNEW branch "workflow_run.head_commit.author.name" ASNEW user2 "workflow_run.head_repository.full_name" ASNEW repository +# Other +REPORT-issueNumber = issueNumber [github_audit] -DATETIME_CONFIG = -INDEXED_EXTRACTIONS = json +# Basic settings +KV_MODE = JSON +DATETIME_CONFIG = LINE_BREAKER = ([\r\n]+) -NO_BINARY_CHECK = true -TIMESTAMP_FIELDS = @timestamp -TIME_FORMAT = %s%3N -TRUNCATE = 1000000 -TZ = GMT -category = Application -disabled = false -pulldown_type = 1 +SHOULD_LINEMERGE = false +pulldown_type = true +# Calculated Fields +EVAL-action = case(change_type="change_merge_setting", "modified", change_type="prepared_workflow_job", "modified", change_type="add_admin", "created", change_type="create", "created", change_type="invite_admin", "invite", change_type="invite_member", "invite", change_type="add_member", "modified", change_type="update_member", "modified", change_type="remove_member", "modified", change_type="grant", "modified", change_type="deauthorize", "modified", change_type="import_license_usage", "read", change_type="clone", "read", change_type="upload_license_usage", "read", change_type="repositories_added", "created", change_type="advanced_security_enabled", "modified", change_type="change_merge_setting", "modified", change_type="push", "modified", 
change_type="login", "logon", change_type="disabled", "modified", change_type="fetch", "read", change_type="disable", "modified", change_type="actions_enabled", "modified", change_type="add_organization", "modified", change_type="advanced_security_enabled_for_new_repos", "modified", change_type="advanced_security_policy_update", "modified", change_type="check", "read", change_type="authorized_users_teams", "modified", change_type="close", "modified", change_type="created_workflow_run", "created", change_type="enable", "modified", change_type="destroy", "deleted", change_type="enable_workflow", "modified", change_type="events_changed", "modified", change_type="completed_workflow_run", "modified", change_type="config_changed", "modified", change_type="merge", "modified", change_type="oauth_app_access_approved", "created", change_type="plan_change", "modified", change_type="remove organization", "modified", change_type="repositories_removed", "deleted", change_type="resolve", "updated", change_type="update", "updated", change_type="update_terms_of_service", "updated", change_type="remove_organization", "deleted", change_type="enable_saml", "modified", change_type="update_saml_provider_settings", "updated", change_type="disable_saml", "disabled", change_type="disable_oauth_app_restrictions", "disabled", change_type="oauth_app_access_denied", "denied", change_type="disable_two_factor_requirement", "disabled", change_type="enable_two_factor_requirement", "enable", 1=1, change_type) +EVAL-command = mvdedup(action) +EVAL-dvc = replace(host, ":\d+", "") +EVAL-object = if(change_type=="repo" OR change_type="repository_secret_scanning", repo, if(change_type=="integration_installation",name,if(isnotnull(org), org, if(isnotnull(name), name,NULL)))) +EVAL-object_category = case( change_type=="repo", "repository", change_type=="integration_installation","integration", isnotnull(repo), "repository", isnotnull(permission), mvdedup(permission), 1=1, NULL) +EVAL-object_attrs = 
if(isnotnull(is_public_repo), "public:" + is_public_repo, if(isnotnull(repository_public), "public:" + repository_public, if(isnotnull(public_repo), "public:" + public_repo, ""))) +EVAL-protocol = mvdedup(transport_protocol_name) +EVAL-status = "success" +EVAL-user = mvdedup(user) +EVAL-vendor_product = "github" +# Field Extractions +EXTRACT-change_type = "action":"[A-z0-9_]+\.(?[^"]+)"," +EXTRACT-object_path,object = "repo":"(?[^"]+)/(?[^"]+)"," +# Field Aliases +FIELDALIAS-user = actor AS user "data.public_repo" AS is_public_repo org AS vendor sc4s_container AS dvc + +[github:enterprise:audit] +# Calculated Fields +EVAL-action = case(change_type="change_merge_setting", "modified", change_type="prepared_workflow_job", "modified", change_type="add_admin", "created", change_type="create", "created", change_type="invite_admin", "invite", change_type="invite_member", "invite", change_type="add_member", "modified", change_type="update_member", "modified", change_type="remove_member", "modified", change_type="grant", "modified", change_type="deauthorize", "modified", change_type="import_license_usage", "read", change_type="clone", "read", change_type="upload_license_usage", "read", change_type="repositories_added", "created", change_type="advanced_security_enabled", "modified", change_type="change_merge_setting", "modified", change_type="push", "modified", change_type="login", "logon", change_type="disabled", "modified", change_type="fetch", "read", change_type="disable", "modified", change_type="actions_enabled", "modified", change_type="add_organization", "modified", change_type="advanced_security_enabled_for_new_repos", "modified", change_type="advanced_security_policy_update", "modified", change_type="check", "read", change_type="authorized_users_teams", "modified", change_type="close", "modified", change_type="created_workflow_run", "created", change_type="enable", "modified", change_type="destroy", "deleted", change_type="enable_workflow", "modified", 
change_type="events_changed", "modified", change_type="completed_workflow_run", "modified", change_type="config_changed", "modified", change_type="merge", "modified", change_type="oauth_app_access_approved", "created", change_type="plan_change", "modified", change_type="remove organization", "modified", change_type="repositories_removed", "deleted", change_type="resolve", "updated", change_type="update", "updated", change_type="update_terms_of_service", "updated", change_type="remove_organization", "deleted", change_type="enable_saml", "modified", change_type="update_saml_provider_settings", "updated", change_type="disable_saml", "disabled", change_type="disable_oauth_app_restrictions", "disabled", change_type="oauth_app_access_denied", "denied", change_type="disable_two_factor_requirement", "disabled", change_type="enable_two_factor_requirement", "enable", 1=1, change_type) +EVAL-command = mvdedup(action) +EVAL-dvc = replace(host, ":\d+", "") +EVAL-object_attrs = if(isnotnull(is_public_repo), "public:" + is_public_repo, if(isnotnull(repository_public), "public:" + repository_public, if(isnotnull(public_repo), "public:" + public_repo, ""))) +EVAL-object_category = case( change_type=="repo", "repository", change_type=="integration_installation","integration", isnotnull(repo), "repository", isnotnull(permission), mvdedup(permission), 1=1, NULL) +EVAL-protocol = mvdedup(transport_protocol_name) +EVAL-status = "success" +EVAL-user = mvdedup(user) +EVAL-vendor_product = "github" +# Field Extractions +EXTRACT-change_type = "action":"[A-z0-9_]+\.(?[^"]+)"," +EXTRACT-object_path,object = "repo":"(?[^"]+)/(?[^"]+)"," +# Field Aliases +FIELDALIAS-field mapping = "data.public_repo" ASNEW is_public_repo org ASNEW vendor sc4s_container ASNEW dvc +FIELDALIAS-user = actor AS user diff --git a/github_app_for_splunk/default/savedsearches.conf b/github_app_for_splunk/default/savedsearches.conf new file mode 100644 index 0000000..5b79f2f --- /dev/null +++ 
b/github_app_for_splunk/default/savedsearches.conf @@ -0,0 +1,158 @@ +[GitHub Disk Utilization Over 70%] +action.keyindicator.invert = 0 +action.makestreams.param.verbose = 0 +action.nbtstat.param.verbose = 0 +action.notable.param.verbose = 0 +action.nslookup.param.verbose = 0 +action.ping.param.verbose = 0 +action.risk.forceCsvResults = 1 +action.risk.param.verbose = 0 +action.send2uba.param.verbose = 0 +action.threat_add.param.verbose = 0 +alert.digest_mode = 0 +alert.suppress = 0 +alert.track = 1 +alert_condition = search disk_util > 70 +counttype = custom +cron_schedule = 5 * * * * +disabled = 1 +dispatch.earliest_time = -5m +dispatch.latest_time = now +display.events.fields = ["source","sourcetype","eventtype","test","ticket","issue","issueNumber","mergeTime","reviewTime","sha","host"] +display.general.type = statistics +display.page.search.mode = verbose +display.page.search.tab = statistics +display.visualizations.charting.axisY.abbreviation = auto +display.visualizations.charting.chart = line +display.visualizations.charting.chart.nullValueMode = connect +display.visualizations.charting.fieldColors = {"Avg":"#1e93c6","Max":"#f2b827"} +display.visualizations.charting.fieldDashStyles = {"Avg":"Solid","Max":"Solid"} +display.visualizations.charting.legend.mode = seriesCompare +display.visualizations.custom.type = sunburst_viz.sunburst_viz +enableSched = 1 +request.ui_dispatch_app = github_app_for_splunk +request.ui_dispatch_view = search +search = | mstats avg(_value) as "Avg" WHERE `github_collectd` AND metric_name="df.df_complex.*.value" AND plugin_instance=root AND host="*" span=10s BY metric_name, host\ +| eval disk_gb = Avg / 1024 / 1024 / 1024\ +| search metric_name !="*.reserved*"\ +| eval metric_name=mvindex(split(metric_name,"."),2)\ +| stats avg("disk_gb") as "Avg" by metric_name, host\ +| xyseries host metric_name Avg\ +| eval disk_total=used+free\ +| eval disk_util=(used/disk_total)*100\ +| fields host disk_util + +[GitHub Disk Utilization Over 
85%] +action.keyindicator.invert = 0 +action.makestreams.param.verbose = 0 +action.nbtstat.param.verbose = 0 +action.notable.param.verbose = 0 +action.nslookup.param.verbose = 0 +action.ping.param.verbose = 0 +action.risk.forceCsvResults = 1 +action.risk.param.verbose = 0 +action.send2uba.param.verbose = 0 +action.threat_add.param.verbose = 0 +alert.digest_mode = 0 +alert.severity = 5 +alert.suppress = 0 +alert.track = 1 +alert_condition = search disk_util > 85 +counttype = custom +cron_schedule = 5 * * * * +disabled = 1 +dispatch.earliest_time = -5m +dispatch.latest_time = now +display.events.fields = ["source","sourcetype","eventtype","test","ticket","issue","issueNumber","mergeTime","reviewTime","sha","host"] +display.general.type = statistics +display.page.search.mode = verbose +display.page.search.tab = statistics +display.visualizations.charting.axisY.abbreviation = auto +display.visualizations.charting.chart = line +display.visualizations.charting.chart.nullValueMode = connect +display.visualizations.charting.fieldColors = {"Avg":"#1e93c6","Max":"#f2b827"} +display.visualizations.charting.fieldDashStyles = {"Avg":"Solid","Max":"Solid"} +display.visualizations.charting.legend.mode = seriesCompare +display.visualizations.custom.type = sunburst_viz.sunburst_viz +enableSched = 1 +request.ui_dispatch_app = github_app_for_splunk +request.ui_dispatch_view = search +search = | mstats avg(_value) as "Avg" WHERE `github_collectd` AND metric_name="df.df_complex.*.value" AND plugin_instance=root AND host="*" span=10s BY metric_name, host\ +| eval disk_gb = Avg / 1024 / 1024 / 1024\ +| search metric_name !="*.reserved*"\ +| eval metric_name=mvindex(split(metric_name,"."),2)\ +| stats avg("disk_gb") as "Avg" by metric_name, host\ +| xyseries host metric_name Avg\ +| eval disk_total=used+free\ +| eval disk_util=(used/disk_total)*100\ +| fields host disk_util + +[GitHub Load Average Above 1] +action.keyindicator.invert = 0 +action.makestreams.param.verbose = 0 
+action.nbtstat.param.verbose = 0 +action.notable.param.verbose = 0 +action.nslookup.param.verbose = 0 +action.ping.param.verbose = 0 +action.risk.forceCsvResults = 1 +action.risk.param.verbose = 0 +action.send2uba.param.verbose = 0 +action.threat_add.param.verbose = 0 +alert.digest_mode = 0 +alert.suppress = 0 +alert.track = 1 +alert_condition = search load.longterm > 1 +counttype = custom +cron_schedule = 1 * * * * +disabled = 1 +dispatch.earliest_time = -5m +dispatch.latest_time = now +display.events.fields = ["source","sourcetype","eventtype","test","ticket","issue","issueNumber","mergeTime","reviewTime","sha","host"] +display.general.type = statistics +display.page.search.mode = verbose +display.page.search.tab = statistics +display.visualizations.charting.axisY.abbreviation = auto +display.visualizations.charting.chart = line +display.visualizations.charting.chart.nullValueMode = connect +display.visualizations.charting.fieldColors = {"Avg":"#1e93c6","Max":"#f2b827"} +display.visualizations.charting.fieldDashStyles = {"Avg":"Solid","Max":"Solid"} +display.visualizations.charting.legend.mode = seriesCompare +display.visualizations.custom.type = sunburst_viz.sunburst_viz +enableSched = 1 +request.ui_dispatch_app = github_app_for_splunk +request.ui_dispatch_view = search +search = | mstats avg(_value) as "Avg" WHERE `github_collectd` AND metric_name="load.longterm" AND host="*" span=10s BY metric_name, host\ +| stats avg(Avg) as "Load" by metric_name, host\ +| xyseries host metric_name Load + +[generate_user_access_lookup] +action.email.useNSSubject = 1 +action.keyindicator.invert = 0 +action.makestreams.param.verbose = 0 +action.nbtstat.param.verbose = 0 +action.notable.param.verbose = 0 +action.nslookup.param.verbose = 0 +action.ping.param.verbose = 0 +action.risk.forceCsvResults = 1 +action.risk.param.verbose = 0 +action.send2uba.param.verbose = 0 +action.threat_add.param.verbose = 0 +alert.track = 0 +cron_schedule = 0 6 * * * +disabled = 1 +description = 
This search will generate a lookup about the access to devsecops environment and write it to a lookup file +dispatch.earliest_time = -30d@d +dispatch.latest_time = now +display.events.fields = ["host","source","sourcetype","sc4s_container","sc4s_destport","sc4s_fromhostip","sc4s_proto","sc4s_syslog_facility","sc4s_syslog_format","sc4s_syslog_severity","sc4s_vendor_product","data.permission","permission","old_permission","user_id","action","app","user_agent","url","status","category","signature","COMMAND","USER","user"] +display.general.timeRangePicker.show = 0 +display.general.type = statistics +display.page.search.mode = verbose +display.page.search.tab = statistics +display.visualizations.charting.chart = line +display.visualizations.show = 0 +enableSched = 1 +request.ui_dispatch_app = github_app_for_splunk +request.ui_dispatch_view = search +search = | pivot Change Auditing_Changes earliest(_time) AS "first_access" latest(_time) as "last_access" SPLITROW action SPLITROW command SPLITROW user SPLITROW object SPLITROW change_type SPLITROW object_category SPLITROW dvc\ +| table first_access,last_access,user,command,action,dvc\ +| outputlookup last_access_by_user diff --git a/github_app_for_splunk/default/tags.conf b/github_app_for_splunk/default/tags.conf new file mode 100644 index 0000000..b4a35e8 --- /dev/null +++ b/github_app_for_splunk/default/tags.conf @@ -0,0 +1,60 @@ +[sourcetype =%20github_audit] + +[sourcetype=github_audit] +audit = enabled +change = enabled + +[eventtype=GitHub%3A%3AVulnerabilityAlert] +report = enabled +vulnerability = enabled + +[eventtype=GitHub%3A%3AChange] +change = enabled +audit = enabled + +[eventtype=GitHub%3A%3ACodeVulnerability] +report = enabled +vulnerability = enabled + +[eventtype=GitHub%3A%3AIssue] +issue = enabled +github = enabled + +[eventtype=GitHub%3A%3AIssue%3A%3AComment] +issue = enabled +comment = enabled +github = enabled + +[eventtype=GitHub%3A%3APullRequest] +pull-request = enabled +code = enabled +github = 
enabled + +[eventtype=GitHub%3A%3APullRequest%3A%3AReview] +pull-request = enabled +review = enabled +code = enabled +github = enabled + +[eventtype=GitHub%3A%3APush] +code = enabled +push = enabled +github = enabled + +[eventtype=GitHub%3A%3ARelease] +code = enabled +release = enabled +github = enabled + +[eventtype=GitHub%3A%3ASecretScanning] +report = enabled +secret = enabled +alert = disabled +vulnerability = enabled + +[eventtype=github%3Aenterprise%3Aauthentication] +authentication = enabled + +[eventtype=github_fork] +audit = enabled +change = enabled diff --git a/github_app_for_splunk/default/transforms.conf b/github_app_for_splunk/default/transforms.conf new file mode 100644 index 0000000..37537ed --- /dev/null +++ b/github_app_for_splunk/default/transforms.conf @@ -0,0 +1,13 @@ +[extractIssueID] +REGEX = \"(message|body)\"\:\"[^\"]*(?:[Cc]los(?:e[sd]?|ing)|[Ff]ix(?:e[sd]|ing)?|[Rr]esolv(?:e[sd]?|ing)?|[Ww]ork\s(?:[Ff]or|[Oo]n)?)\s(?:[A-Za-z\#\/_-]*)(?[0-9]+)[^\"]*\" +MV_ADD = true + +[action] +DELIMS = . +FIELDS = change_type,command +SOURCE_KEY = action + + +[issueNumber] +MV_ADD = 1 +REGEX = (?(?<=refs\/heads\/|\"ref\":\")[\d]*) \ No newline at end of file diff --git a/github_app_for_splunk/metadata/default.meta b/github_app_for_splunk/metadata/default.meta index b77b8cb..ba4dfaa 100644 --- a/github_app_for_splunk/metadata/default.meta +++ b/github_app_for_splunk/metadata/default.meta @@ -2,13 +2,19 @@ # Application-level permissions [] -access = read : [ * ], write : [ admin, power ] +access = read : [ * ], write : [ admin, sc_admin, power ] +export = system ### EVENT TYPES [eventtypes] export = system +### TAGS + +[tags] +export = system + ### PROPS @@ -33,3 +39,8 @@ export = system [viewstates] access = read : [ * ], write : [ * ] export = system + +### MACROS + +[macros] +export = system pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy