-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #34 from FireTail-io/gh-actionify
Gh actionify
- Loading branch information
Showing
15 changed files
with
483 additions
and
70 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,34 @@ | ||
# Multi-stage build for the FireTail GitHub Action image.

# Stage 1: compile the Golang analyser into a C shared library (loaded from Python via ctypes/cgo).
FROM golang:1.20.5-bullseye as build-golang
COPY ./analysers/golang /src
RUN cd /src && go build -buildmode=c-shared -o /dist/main.so

# Test stage for the Golang analyser: unit tests plus an HTML coverage report.
FROM build-golang as test-golang
RUN cd /src && go test -coverprofile=coverage.out ./...
RUN cd /src && go tool cover -html coverage.out -o coverage.html

# Stage 2: build the tree-sitter grammar shared library used for Javascript analysis.
# The tree-sitter-javascript tag and the tree_sitter pip pin are kept in lockstep (v0.20.2 / 0.20.2).
FROM python:3.11-bullseye as build-tree-sitter
RUN apt-get update -y && apt-get upgrade -y
RUN mkdir /src && cd /src && git clone https://github.com/tree-sitter/tree-sitter-javascript --branch v0.20.2 --single-branch
RUN python3 -m pip install tree_sitter==0.20.2
COPY ./analysers/tree-sitter/build.py /src/build.py
RUN cd /src && python3 build.py

# Stage 3: assemble the Python runtime with both compiled analysers and the app source.
FROM python:3.11-bullseye as build-python
RUN apt-get update -y && apt-get upgrade -y
COPY --from=build-golang /dist/main.so /analysers/golang/main.so
COPY --from=build-tree-sitter /dist/languages.so /analysers/tree-sitter/languages.so
COPY ./build_setup/requirements.txt /build_setup/requirements.txt
RUN python3 -m pip install -r /build_setup/requirements.txt
COPY ./src/ /github-api-discovery/src
RUN rm -rf /build_setup

# Test stage for the Python code: pytest with XML coverage output.
FROM build-python as test
COPY ./setup.cfg /github-api-discovery/setup.cfg
RUN python3 -m pip install pytest pytest-cov
COPY ./tests/ /github-api-discovery/tests/
RUN cd /github-api-discovery && pytest --cov . --cov-report=xml:coverage.xml -vv -x

# Runtime stage: ENTRYPOINT is the python interpreter, CMD supplies the script
# path as its argument, so the container runs `python /github-api-discovery/src/main_githubaction.py`.
FROM build-python as runtime
RUN chmod +x /github-api-discovery/src/main_githubaction.py
CMD ["/github-api-discovery/src/main_githubaction.py"]
ENTRYPOINT ["python"]
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,46 @@ | ||
# action.yaml — metadata for the FireTail GitHub Action (Docker container action).
name: "FireTail Github Action"
description: "A Github Action that will upload an API Spec file to the FireTail Platform and perform static analysis on your repository"
inputs:
  FIRETAIL_API_TOKEN:
    description: "Your FireTail API token"
    required: true
  ORGANIZATION_UUID:
    description: "Your Firetail Organization UUID"
    required: true
  FIRETAIL_API_URL:
    # Fixed: description was a copy-paste of FIRETAIL_API_TOKEN's.
    description: "The base URL of the FireTail API to upload to"
    required: false
    default: "https://api.saas.eu-west-1.prod.firetail.app"
  CONTEXT:
    required: false
    description: "provides the github context that gets passed with the api call. this allows for determining where the change came from and by which user"
  COLLECTION_UUID:
    description: "UUID of the FireTail API Collection to directly upload the API spec at API_SPEC_LOCATION to"
    required: false
  API_SPEC_LOCATION:
    description: "Path to your OpenAPI/Swagger spec file"
    required: false
  STATIC_ANALYSIS_ROOT_DIR:
    description: "The root directory in your repository to perform static analysis from"
    required: false
    default: "/"
  STATIC_ANALYSIS_LANGUAGES:
    description: "A comma separated list of languages to statically analyse (currently supported are Python, Golang and Javascript)"
    required: false
    default: "Python,Golang,Javascript"
  CRITICAL_FINDING_THRESHOLD:
    description: "Finding level for failing the action if there is more than this number of Critical findings"
    default: "1"
  HIGH_FINDING_THRESHOLD:
    # Fixed: description said "Finding findings" instead of "High findings".
    description: "Finding level for failing the action if there is more than this number of High findings"
    default: "1"
  MEDIUM_FINDING_THRESHOLD:
    description: "Finding level for failing the action if there is more than this number of Medium findings"
    default: "4"
  LOW_FINDING_THRESHOLD:
    description: "Finding level for failing the action if there is more than this number of Low findings"
    default: "10"
runs:
  using: "docker"
  image: "Dockerfile.githubaction"
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -5,3 +5,4 @@ dacite==1.8.1 | |
tree_sitter==0.20.2
PyGithub==1.59.1
jsonschema==4.19.0
# NOTE(review): unpinned, unlike every other dependency in this file — pin a
# version (e.g. requests==2.x.y) for reproducible image builds; confirm which
# version the project has been tested against.
requests
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,4 +1,5 @@ | ||
import time | ||
|
||
from scanning import scan | ||
from utils import logger | ||
|
||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,4 +1,5 @@ | ||
import time | ||
|
||
from scanning import scan | ||
from utils import logger | ||
|
||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,172 @@ | ||
import datetime | ||
import json | ||
import os | ||
import time | ||
import uuid | ||
|
||
import requests | ||
|
||
from openapi.validation import parse_resolve_and_validate_openapi_spec | ||
from static_analysis import LANGUAGE_ANALYSERS | ||
from utils import ( | ||
GitHubContext, | ||
get_api_uuid_from_api_token, | ||
load_openapi_spec, | ||
logger, | ||
upload_api_spec_to_firetail_collection, | ||
upload_discovered_api_spec_to_firetail, | ||
) | ||
|
||
# Base URL for all FireTail API calls; overridable via FIRETAIL_API_URL,
# defaulting to the production eu-west-1 SaaS deployment.
BASE_URL = os.environ.get("FIRETAIL_API_URL", "https://api.saas.eu-west-1.prod.firetail.app")
|
||
|
||
def handler():
    """Entry point for the FireTail GitHub Action.

    Three phases:
      1. If API_SPEC_LOCATION and COLLECTION_UUID are set, upload that spec
         directly to the given FireTail collection.
      2. Walk STATIC_ANALYSIS_ROOT_DIR; upload any file that is itself an
         OpenAPI spec, otherwise run the configured language analysers and
         upload any specs they generate.
      3. Poll the FireTail API for findings against every uploaded spec and
         fail the action if any severity threshold is breached.

    Raises:
        Exception: if FIRETAIL_API_TOKEN is unset, or if findings breach a
            configured threshold.
    """
    firetail_api_token = os.environ.get("FIRETAIL_API_TOKEN")
    if firetail_api_token is None:
        # Fixed: message previously had an unterminated quote around the var name.
        raise Exception("Missing environment variable 'FIRETAIL_API_TOKEN'")
    external_uuids = []
    last_time = time.time()
    # Normalise CONTEXT to either a GitHubContext or None (empty string -> None).
    raw_context = os.environ.get("CONTEXT")
    context = get_context(raw_context) if raw_context else None
    # If API_SPEC_LOCATION is set then we upload the OpenAPI spec at that location
    collection_uuid = os.environ.get("COLLECTION_UUID")
    org_uuid = os.environ.get("ORGANIZATION_UUID")
    api_spec_location = os.environ.get("API_SPEC_LOCATION")
    if api_spec_location is None:
        logger.info("API_SPEC_LOCATION is not set, skipping direct upload step.")
    elif collection_uuid is None:
        logger.info("COLLECTION_UUID is not set, skipping direct upload step.")
    else:
        # If we have a CONTEXT then we can add the API_SPEC_LOCATION to the file_urls
        if context is not None:
            context.file_urls.append(api_spec_location)
        openapi_spec = load_openapi_spec(api_spec_location)
        external_id = str(uuid.uuid4())
        if context is not None:
            # Guard added: the original assigned unconditionally and raised
            # AttributeError whenever CONTEXT was not provided.
            context.external_id = external_id
        upload_api_spec_to_firetail_collection(
            openapi_spec=openapi_spec,
            context=context,
            collection_uuid=collection_uuid,
            firetail_api_url=BASE_URL,
            firetail_api_token=firetail_api_token,
        )
        last_time = time.time()
        external_uuids.append(external_id)
        logger.info(f"Successfully uploaded OpenAPI spec to Firetail: {api_spec_location}")

    static_analysis_root_dir = os.environ.get("STATIC_ANALYSIS_ROOT_DIR", "/")
    # Fixed: this was a map() iterator, which `in` tests consume — after the
    # first membership check it was exhausted and every remaining language was
    # skipped for all subsequent files. A set gives repeatable O(1) lookups.
    static_analysis_languages = {
        v.strip() for v in os.environ.get("STATIC_ANALYSIS_LANGUAGES", "Python,Golang,Javascript").split(",")
    }
    logger.info(f"Statically analysing files under {static_analysis_root_dir}...")
    for path, _, filenames in os.walk(static_analysis_root_dir):
        for filename in filenames:
            # Fixed: full_path previously ignored `filename`, so every entry in
            # a directory resolved to the same (nonexistent) path.
            full_path = f"{path}/{filename}"
            logger.info(f"Statically analysing {full_path}...")
            try:
                file_contents = open(full_path, "r").read()
            except Exception as e:
                logger.critical(f"{full_path}: Could not read, exception: {e}")
                continue

            # Check if the file is an openapi spec first. If it is, there's no
            # point doing expensive static analysis.
            openapi_spec = parse_resolve_and_validate_openapi_spec(full_path, lambda: file_contents)
            if openapi_spec is not None:
                logger.info(f"{full_path}: Detected OpenAPI spec, uploading to Firetail...")
                external_uuid = str(uuid.uuid4())
                if context is not None:  # guard: context may be None (no CONTEXT env var)
                    context.external_id = external_uuid
                upload_discovered_api_spec_to_firetail(
                    source=full_path,
                    openapi_spec=openapi_spec,
                    api_uuid=get_api_uuid_from_api_token(firetail_api_token),
                    firetail_api_url=BASE_URL,
                    firetail_api_token=firetail_api_token,
                )
                external_uuids.append(external_uuid)
                last_time = time.time()
                continue

            for language, language_analysers in LANGUAGE_ANALYSERS.items():
                if language not in static_analysis_languages:
                    continue

                for language_analyser in language_analysers:
                    try:
                        _, openapi_specs_from_analysis = language_analyser(full_path, file_contents)
                    except Exception as e:
                        logger.critical(f"{full_path}: Could not analyse, exception: {e}")
                        continue
                    for openapi_spec_source, openapi_spec in openapi_specs_from_analysis.items():
                        logger.info(f"{full_path}: Created OpenAPI spec via {language} static analysis...")
                        external_uuid = str(uuid.uuid4())
                        if context is not None:  # guard: context may be None
                            context.external_id = external_uuid
                        upload_discovered_api_spec_to_firetail(
                            source=openapi_spec_source,
                            openapi_spec=openapi_spec,
                            api_uuid=get_api_uuid_from_api_token(firetail_api_token),
                            firetail_api_url=BASE_URL,
                            firetail_api_token=firetail_api_token,
                        )
                        external_uuids.append(external_uuid)
                        last_time = time.time()

    if not external_uuids:
        # We don't have anything else to check, just return.
        return
    # We have external IDs, now poll for finding counts until the timeout elapses.
    # Fixed: env vars are strings, so the original compared float > str and
    # raised TypeError whenever FINDING_TIMEOUT_SECONDS was set.
    wait_time = int(os.environ.get("FINDING_TIMEOUT_SECONDS", 60))
    while (time.time() - last_time) <= wait_time:
        for ex_id in external_uuids:
            if findings_breach_threshold(ex_id, org_uuid, firetail_api_token):
                raise Exception("Error - This action found errors with your spec")
        # Added: the original busy-waited, hammering the findings endpoint.
        time.sleep(5)
|
||
|
||
def get_context(context):
    """Deserialize the JSON CONTEXT payload into a GitHubContext.

    Missing string fields default to "" ; `private` and `runId` default to
    None. `file_urls` always starts empty and the trigger timestamps are
    taken at call time (microseconds since epoch / ISO-8601 UTC).
    """
    data = json.loads(context)
    event = data.get("event", {})
    repository = event.get("repository", {})
    head_commit_author = event.get("head_commit", {}).get("author", {})
    return GitHubContext(
        sha=data.get("sha", ""),
        repositoryId=data.get("repository_id", ""),
        repositoryName=repository.get("name", ""),
        repositoryOwner=data.get("repository_owner", ""),
        ref=data.get("ref", ""),
        headCommitUsername=head_commit_author.get("username", ""),
        actor=data.get("actor", ""),
        actorId=data.get("actor_id", ""),
        workflowRef=data.get("workflow_ref", ""),
        eventName=data.get("event_name", ""),
        private=repository.get("private"),
        runId=data.get("run_id"),
        timeTriggered=int(time.time() * 1000 * 1000),
        timeTriggeredUTCString=datetime.datetime.now(datetime.timezone.utc).isoformat(),
        external_id=data.get("external_id", ""),
        file_urls=[],
    )
|
||
|
||
def get_thresholds() -> dict:
    """Read the finding-count thresholds from the environment.

    Returns:
        dict: severity level ("CRITICAL"/"HIGH"/"MEDIUM"/"LOW") -> maximum
        permitted finding count at that level.

    Values are cast to int. The original returned the raw environment value
    (a str) whenever a threshold was overridden — os.environ values are
    always strings — which made the later `count > limit` comparison in
    findings_breach_threshold raise TypeError.
    """
    return {
        "CRITICAL": int(os.environ.get("CRITICAL_FINDING_THRESHOLD", 1)),
        "HIGH": int(os.environ.get("HIGH_FINDING_THRESHOLD", 1)),
        "MEDIUM": int(os.environ.get("MEDIUM_FINDING_THRESHOLD", 4)),
        "LOW": int(os.environ.get("LOW_FINDING_THRESHOLD", 10)),
    }
|
||
|
||
def findings_breach_threshold(ex_id: str, org_uuid: str, api_token: str):
    """Check the findings recorded against one uploaded spec's external ID.

    Fetches the event for `ex_id` from the FireTail API and compares its
    initial finding severity counts against the configured thresholds.

    Returns:
        None — falsy — when the event could not be fetched (non-200) or no
        threshold is breached.

    Raises:
        Exception: if any severity count exceeds its threshold.
    """
    endpoint = f"{BASE_URL}/organisations/{org_uuid}/events/external-id/{ex_id}"
    event_resp = requests.get(endpoint, headers={"x-ft-api-key": api_token, "Content-Type": "application/json"})
    if event_resp.status_code != 200:  # pragma: nocover
        print("ERROR", {"message": "Non 200 response from events", "resp": event_resp, "resp_text": event_resp.text})
        return
    thresholds = get_thresholds()
    # Hoisted: the original called event_resp.json() twice (it re-parses the body).
    payload = event_resp.json()
    print("Event resp json was -> ", payload)
    findings = payload.get("initialFindingSeverities", {})
    for level, limit in thresholds.items():
        # int() on both sides: env-derived limits may be strings (os.environ
        # values are str), and the original str/int comparison raised TypeError.
        if int(findings.get(level, 0)) > int(limit):
            raise Exception(f"Findings breached limit: {findings}")
|
||
|
||
# Script entry point: the runtime Docker stage executes this file directly
# (ENTRYPOINT ["python"] with this script as CMD).
if __name__ == "__main__":
    handler()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.