Skip to content

Commit

Permalink
Merge branch 'master' into v3-homepage
Browse files Browse the repository at this point in the history
  • Loading branch information
Annoraaq committed Nov 29, 2024
2 parents cb34713 + 990a228 commit f3c5b13
Show file tree
Hide file tree
Showing 57 changed files with 1,506 additions and 143 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/linters.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-20.04, ubuntu-22.04]
python-version: ['3.8', '3.10']
python-version: ['3.9', '3.10']

steps:
- uses: actions/checkout@v2
Expand Down
8 changes: 5 additions & 3 deletions .github/workflows/unit-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-20.04, ubuntu-22.04]
python-version: ['3.8', '3.10']
python-version: ['3.9', '3.10']
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
Expand All @@ -22,10 +22,12 @@ jobs:
run: |
pip install pipenv
pipenv install -d
pipenv install -r test_requirements.txt
pipenv run pip install -r test_requirements.txt
- name: Check pytest installation
run: pipenv run pip show pytest
- name: Run unit tests
run: |
pipenv run python run_tests.py
pipenv run python3 run_tests.py
# Frontend tests (VueJS)
VueJS:
Expand Down
30 changes: 21 additions & 9 deletions api_client/python/timesketch_api_client/search.py
Original file line number Diff line number Diff line change
Expand Up @@ -493,12 +493,19 @@ def _execute_query(self, file_name="", count=False):
"""Execute a search request and store the results.
Args:
file_name (str): optional file path to a filename that
file_name (str): Optional file path to a filename that
all the results will be saved to. If not provided
the results will be stored in the search object.
count (bool): optional boolean that determines whether
count (bool): Optional boolean that determines whether
we want to execute the query or only count the
number of events that the query would produce.
number of events that the query would produce. If
set to True, the results will be stored in the
search object, and the number of events will be
returned.
Returns:
A dict with the search results or the total number of events
(if count=True) or None if saved to file.
"""
query_filter = self.query_filter
if not isinstance(query_filter, dict):
Expand Down Expand Up @@ -531,14 +538,14 @@ def _execute_query(self, file_name="", count=False):
if file_name:
with open(file_name, "wb") as fw:
fw.write(response.content)
return
return None

response_json = error.get_response_json(response, logger)

if count:
meta = response_json.get("meta", {})
self._total_elastic_size = meta.get("total_count", 0)
return
return meta.get("total_count", 0)

scroll_id = response_json.get("meta", {}).get("scroll_id", "")
form_data["scroll_id"] = scroll_id
Expand Down Expand Up @@ -579,6 +586,7 @@ def _execute_query(self, file_name="", count=False):
)

self._raw_response = response_json
return response_json

def add_chip(self, chip):
"""Add a chip to the ..."""
Expand Down Expand Up @@ -647,7 +655,7 @@ def expected_size(self):
if self._total_elastic_size:
return self._total_elastic_size

self._execute_query(count=True)
_ = self._execute_query(count=True)
return self._total_elastic_size

def from_manual( # pylint: disable=arguments-differ
Expand Down Expand Up @@ -1074,8 +1082,10 @@ def scrolling_enable(self):

def to_dict(self):
"""Returns a dict with the respone of the query."""
if not self._raw_response:
if self._raw_response is None:
self._execute_query()
if self._raw_response is None:
raise ValueError("No results to return.")

return self._raw_response

Expand All @@ -1098,8 +1108,10 @@ def to_file(self, file_name):

def to_pandas(self):
"""Returns a pandas DataFrame with the response of the query."""
if not self._raw_response:
self._execute_query()
if self._raw_response is None:
self._raw_response = self._execute_query()
if self._raw_response is None:
raise ValueError("No results to return.")

return_list = []
timelines = {t.id: t.name for t in self._sketch.list_timelines()}
Expand Down
3 changes: 3 additions & 0 deletions api_client/python/timesketch_api_client/story.py
Original file line number Diff line number Diff line change
Expand Up @@ -729,6 +729,9 @@ def to_string(self):
string_list.append(block.text)
elif block.TYPE == "view":
search_obj = block.view
if search_obj is None:
logging.warning("Block has no view. Skipping")
continue
data_frame = search_obj.to_pandas()
string_list.append(data_frame.to_string(index=False))
elif block.TYPE == "aggregation":
Expand Down
2 changes: 1 addition & 1 deletion api_client/python/timesketch_api_client/version.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
"""Version information for Timesketch API Client."""


__version__ = "20240828"
__version__ = "20241129"


def get_version():
Expand Down
16 changes: 12 additions & 4 deletions contrib/deploy_timesketch.sh
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,16 @@
set -e

START_CONTAINER=
SKIP_CREATE_USER=

if [ "$1" == "--start-container" ]; then
START_CONTAINER=yes
fi
while [[ "$#" -gt 0 ]]; do
case $1 in
--start-container) START_CONTAINER=yes ;;
--skip-create-user) SKIP_CREATE_USER=yes ;;
*) echo "Unknown parameter passed: $1"; exit 1 ;;
esac
shift
done

# Exit early if run as non-root user.
if [ "$EUID" -ne 0 ]; then
Expand Down Expand Up @@ -150,7 +156,9 @@ else
exit
fi

read -p "Would you like to create a new timesketch user? [Y/n] (default:no)" CREATE_USER
if [ -z "$SKIP_CREATE_USER" ]; then
read -p "Would you like to create a new timesketch user? [y/N]" CREATE_USER
fi

if [ "$CREATE_USER" != "${CREATE_USER#[Yy]}" ] ;then
read -p "Please provide a new username: " NEWUSERNAME
Expand Down
5 changes: 5 additions & 0 deletions data/sigma_config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,11 @@ logsources:
product: windows
conditions:
data_type: "windows:evtx:record"
service_windows_certificate_services:
service: certificateservicesclient-lifecycle-system
conditions:
source_name:
- "Microsoft-Windows-CertificateServicesClient-Lifecycle-System"
service_windows_security:
service: security
conditions:
Expand Down
80 changes: 80 additions & 0 deletions data/tags.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -37,3 +37,83 @@ yara_match_tagger:
modifiers: ['split']
save_search: true
search_name: 'Yara rule matches'

aws_cloudtrail_readonly_true:
query_string: 'data_type:"aws:cloudtrail:entry" AND cloud_trail_event:"*readOnly\":true*"'
tags: ['readOnly_true']
emojis: ['MAGNIFYING_GLASS']
save_search: true
search_name: 'readOnly_true'

aws_cloudtrail_readonly_false:
query_string: 'data_type:"aws:cloudtrail:entry" AND cloud_trail_event:"*readOnly\":false*"'
tags: ['readOnly_false']
emojis: ['SPARKLES']
save_search: true
search_name: 'readOnly_false'

aws_cloudtrail_unauthorized_api_call:
query_string: 'data_type:"aws:cloudtrail:entry" AND cloud_trail_event: ("*errorCode\":\"AccessDenied*" OR "*errorCode\":\"UnauthorizedOperation*")'
tags: ['UnauthorizedAPICall']
save_search: true
search_name: 'UnauthorizedAPICall'

aws_cloudtrail_failed_login_non_existent_iam_user:
query_string: 'data_type:"aws:cloudtrail:entry" AND cloud_trail_event:"*userIdentity\":\"HIDDEN_DUE_TO_SECURITY_REASONS*" AND cloud_trail_event:"*errorMessage\":\"No username found in supplied account*"'
tags: ['FailedLoginNonExistentIAMUser']
save_search: true
search_name: 'FailedLoginNonExistentIAMUser'

aws_cloudtrail_security_group:
query_string: 'data_type:"aws:cloudtrail:entry" AND event_name: ("AuthorizeSecurityGroupEgress" OR "AuthorizeSecurityGroupIngress" OR "CreateSecurityGroup" OR "DeleteSecurityGroup" OR "ModifySecurityGroupRules" OR "RevokeSecurityGroupEgress" OR "RevokeSecurityGroupIngress")'
tags: ['NetworkChanged', 'SG']
save_search: true
search_name: 'NetworkChanged SecurityGroup'

aws_cloudtrail_network_acl:
query_string: 'data_type:"aws:cloudtrail:entry" AND event_name: ("CreateNetworkAcl" OR "CreateNetworkAclEntry" OR "DeleteNetworkAcl" OR "DeleteNetworkAclEntry" OR "ReplaceNetworkAclAssociation" OR "ReplaceNetworkAclEntry")'
tags: ['NetworkChanged', 'NACL']
save_search: true
search_name: 'NetworkChanged NetworkACl'

aws_cloudtrail_gateway:
query_string: 'data_type:"aws:cloudtrail:entry" AND event_name: (Accept* OR Associate* OR Attach* OR Create* OR Delete* OR Replace*) AND event_name:*Gateway'
tags: ['NetworkChanged', 'GW']
save_search: true
search_name: 'NetworkChanged GateWay'

aws_cloudtrail_routetable:
query_string: 'data_type:"aws:cloudtrail:entry" AND event_name: ("CreateRoute" OR "CreateRouteTable" OR "DeleteRoute" OR "DeleteRouteTable" OR "DisassociateRouteTable" OR "ReplaceRoute" OR "ReplaceRouteTableAssociation")'
tags: ['NetworkChanged', 'RouteTable']
save_search: true
search_name: 'NetworkChanged RouteTable'

aws_cloudtrail_vpc:
query_string: 'data_type:"aws:cloudtrail:entry" AND event_name: ("AcceptVpcPeeringConnection" OR "AttachClassicLinkVpc" OR "CreateVpc" OR "CreateVpcPeeringConnection" OR "DeleteVpc" OR "DeleteVpcPeeringConnection" OR "DetachClassicLinkVpc" OR "DisableVpcClassicLink" OR "EnableVpcClassicLink" OR "ModifyVpcAttribute" OR "RejectVpcPeeringConnection")'
tags: ['NetworkChanged', 'VPC']
save_search: true
search_name: 'NetworkChanged VPC'

aws_cloudtrail_suspicous_iam_activity:
query_string: 'data_type:"aws:cloudtrail:entry" AND event_name: ("AddRoleToInstanceProfile" OR "AddUserToGroup" OR "AssumeRole" OR "AttachGroupPolicy" OR "AttachRolePolicy" OR "AttachUserPolicy" OR "CreateAccessKey" OR "CreateLoginProfile" OR "CreatePolicyVersion" OR "CreateRole" OR "PassRole" OR "PutGroupPolicy" OR "PutRolePolicy" OR "PutUserPolicy" OR "SetDefaultPolicyVersion" OR "UpdateAccessKey" OR "UpdateLoginProfile" OR "GetFederationToken" )'
tags: ['SuspicousIAMActivity']
save_search: true
search_name: 'SuspicousIAMActivity'

aws_cloudtrail_suspicous_iam_identity_center_activity:
query_string: 'data_type:"aws:cloudtrail:entry" AND event_name: ("StartSSO" OR "CreateUser" OR "CreateGroup" OR "AddMemberToGroup" OR "CreatePermissionSet" OR "CreateAccountAssignment" OR "Authenticate" OR "Federate" OR "AssumeRoleWithSAML")'
tags: ['SuspicousIICActivity']
save_search: true
search_name: 'SuspicousIICActivity'

aws_cloudtrail_console_login:
query_string: 'data_type:"aws:cloudtrail:entry" AND event_name:"ConsoleLogin"'
tags: ['ConsoleLogin']
save_search: true
search_name: 'ConsoleLogin'

aws_cloudtrail_get_caller_identity:
query_string: 'data_type:"aws:cloudtrail:entry" AND event_name:"GetCallerIdentity"'
tags: ['GetCallerIdentity']
save_search: true
search_name: 'GetCallerIdentity'
2 changes: 1 addition & 1 deletion docker/release/config.env
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# Timesketch version to run. Latest is build from the master branch and a release
# number is build from a release tag. Using latest means that you are running
# the bleeding edge version and we cannot guarantee that it will not be broken.
TIMESKETCH_VERSION=20240828
TIMESKETCH_VERSION=20241009

# Timesketch PATH local etc/timesketch
TIMESKETCH_CONFIG_PATH=./etc/timesketch
Expand Down
6 changes: 3 additions & 3 deletions docs/developers/getting-started.md
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ User <USER> created/updated

Now, start the `gunicorn` server that will serve the Timesketch WSGI app.

To make this task easier, we recommend using the `timesketch/contrib/tsdev.sh`
To make this task easier, we recommend using the `timesketch/utils/tsdev.sh`
script.

In one shell:
Expand Down Expand Up @@ -92,7 +92,7 @@ If you're planning to work on those (or even just import timelines into your
Timesketch instance), you'll need to launch a Celery worker, and re-launch it
every time you bring changes to its code.
You can use `timesketch/contrib/tsdev.sh` for this task as well.
You can use `timesketch/utils/tsdev.sh` for this task as well.
In a new shell, run the following:
Expand Down Expand Up @@ -130,7 +130,7 @@ For development on the new `frontend-ng` UI, you need to install some
dependencies once and start the new frontend. More on frontend development is
documented [here](https://timesketch.org/developers/frontend-development/).
We recommend using the `timesketch/contrib/tsdev.sh` script for this task as well.
We recommend using the `timesketch/utils/tsdev.sh` script for this task as well.
Install frontend-ng dependencies:
```bash
Expand Down
4 changes: 2 additions & 2 deletions docs/guides/admin/troubleshooting.md
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ See [docs/learn/server-admin](docs/learn/server-admin#troubleshooting-database-s
- Is it a specific file that causes problems?
- What is the WebUI status of the import?
- Try switching from WebUI to the `import_client.py` to upload the same file
- Try to upload one of the [sample files](https://github.com/google/timesketch/blob/master/test_tools/test_events/sigma_events.csv)
- Try to upload one of the [sample files](https://github.com/google/timesketch/blob/master/tests/test_events/sigma_events.csv)
- If you open a GitHub issue for an import problem, please indicate what type of file you tried to upload and include the error message / stack trace you received

### Issues importing a CSV file
Expand All @@ -71,7 +71,7 @@ See [docs/learn/server-admin](docs/learn/server-admin#troubleshooting-database-s
- Is there an encoding issue in the CSV file
- If you tried to upload via web, try the import client and the other way around
- Check the celery logs
- Try to upload [This sample](https://github.com/google/timesketch/blob/master/test_tools/test_events/sigma_events.csv)
- Try to upload [This sample](https://github.com/google/timesketch/blob/master/tests/test_events/sigma_events.csv)
- If you open a Github issue, provide at least the header of your CSV and a few lines of content (please scramble PII) so it can be reproduced.

### Issues importing Plaso file
Expand Down
50 changes: 42 additions & 8 deletions importer_client/python/timesketch_import_client/importer.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,11 +28,41 @@

from timesketch_api_client import timeline
from timesketch_api_client import definitions
from timesketch_api_client.error import UnableToRunAnalyzer
from timesketch_import_client import utils

logger = logging.getLogger("timesketch_importer.importer")


def run_analyzers(analyzer_names=None, timeline_obj=None):
"""Run the analyzers on the uploaded timeline."""

if not timeline_obj:
logger.error("Unable to run analyzers: Timeline object not found.")
raise ValueError("Timeline object not found.")

if timeline_obj.status not in ("ready", "success"):
logger.error("The provided timeline '%s' is not ready yet!", timeline_obj.name)
return None

if not analyzer_names:
logger.info("No analyzer names provided, skipping analysis.")
return None

try:
analyzer_results = timeline_obj.run_analyzers(analyzer_names)
except UnableToRunAnalyzer as e:
logger.error(
"Failed to run requested analyzers '%s'! Error: %s",
str(analyzer_names),
str(e),
)
return None

logger.debug("Analyzer results: %s", analyzer_results)
return analyzer_results


class ImportStreamer(object):
"""Upload object used to stream results to Timesketch."""

Expand Down Expand Up @@ -708,8 +738,18 @@ def celery_task_id(self):
"""Return the celery task identification for the upload."""
return self._celery_task_id

def _trigger_analyzers(self, analyzer_names=None):
"""Run the analyzers on the uploaded timeline."""

self._ready()

if self._data_lines:
self.flush(end_stream=True)

return run_analyzers(analyzer_names=analyzer_names, timeline_obj=self.timeline)

def close(self):
"""Close the streamer."""
"""Close the streamer"""
try:
self._ready()
except ValueError:
Expand All @@ -718,13 +758,6 @@ def close(self):
if self._data_lines:
self.flush(end_stream=True)

# Trigger auto analyzer pipeline to kick in.
pipe_resource = "{0:s}/sketches/{1:d}/analyzer/".format(
self._sketch.api.api_root, self._sketch.id
)
data = {"index_name": self._index}
_ = self._sketch.api.session.post(pipe_resource, json=data)

def flush(self, end_stream=True):
"""Flushes the buffer and uploads to timesketch.
Expand All @@ -736,6 +769,7 @@ def flush(self, end_stream=True):
ValueError: if the stream object is not fully configured.
RuntimeError: if the stream was not uploaded.
"""

if not self._data_lines:
return

Expand Down
Loading

0 comments on commit f3c5b13

Please sign in to comment.