Skip to content

Commit

Permalink
Merge branch 'development' of https://github.com/mmguero-dev/Malcolm
Browse files Browse the repository at this point in the history
…into staging
  • Loading branch information
mmguero committed Nov 14, 2024
2 parents d4765bc + 0aecb45 commit 2383599
Show file tree
Hide file tree
Showing 53 changed files with 1,385 additions and 647 deletions.
2 changes: 1 addition & 1 deletion Dockerfiles/arkime.Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1

ENV ARKIME_DIR "/opt/arkime"
ENV ARKIME_VERSION "5.4.0"
ENV ARKIME_VERSION "5.5.0"
ENV ARKIME_DEB_URL "https://github.com/arkime/arkime/releases/download/v${ARKIME_VERSION}/arkime_${ARKIME_VERSION}-1.debian12_XXX.deb"
ENV ARKIME_JA4_SO_URL "https://github.com/arkime/arkime/releases/download/v${ARKIME_VERSION}/ja4plus.XXX.so"
ENV ARKIME_LOCALELASTICSEARCH no
Expand Down
10 changes: 5 additions & 5 deletions Dockerfiles/dashboards.Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM opensearchproject/opensearch-dashboards:2.17.1
FROM opensearchproject/opensearch-dashboards:2.18.0

LABEL maintainer="[email protected]"
LABEL org.opencontainers.image.authors='[email protected]'
Expand Down Expand Up @@ -42,10 +42,10 @@ RUN export BINARCH=$(uname -m | sed 's/x86_64/amd64/' | sed 's/aarch64/arm64/')
# Malcolm manages authentication and encryption via NGINX reverse proxy
/usr/share/opensearch-dashboards/bin/opensearch-dashboards-plugin remove securityDashboards --allow-root && \
cd /tmp && \
# unzip transformVis.zip opensearch-dashboards/transformVis/opensearch_dashboards.json opensearch-dashboards/transformVis/package.json && \
# sed -i "s/2\.16\.0/2\.17\.0/g" opensearch-dashboards/transformVis/opensearch_dashboards.json && \
# sed -i "s/2\.16\.0/2\.17\.0/g" opensearch-dashboards/transformVis/package.json && \
# zip transformVis.zip opensearch-dashboards/transformVis/opensearch_dashboards.json opensearch-dashboards/transformVis/package.json && \
unzip transformVis.zip opensearch-dashboards/transformVis/opensearch_dashboards.json opensearch-dashboards/transformVis/package.json && \
sed -i "s/2\.17\.1/2\.18\.0/g" opensearch-dashboards/transformVis/opensearch_dashboards.json && \
sed -i "s/2\.17\.1/2\.18\.0/g" opensearch-dashboards/transformVis/package.json && \
zip transformVis.zip opensearch-dashboards/transformVis/opensearch_dashboards.json opensearch-dashboards/transformVis/package.json && \
cd /usr/share/opensearch-dashboards/plugins && \
/usr/share/opensearch-dashboards/bin/opensearch-dashboards-plugin install file:///tmp/transformVis.zip --allow-root && \
rm -rf /tmp/transformVis /tmp/opensearch-dashboards && \
Expand Down
7 changes: 3 additions & 4 deletions Dockerfiles/filebeat.Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM docker.elastic.co/beats/filebeat-oss:8.15.3
FROM docker.elastic.co/beats/filebeat-oss:8.16.0

# Copyright (c) 2024 Battelle Energy Alliance, LLC. All rights reserved.
LABEL maintainer="[email protected]"
Expand Down Expand Up @@ -100,15 +100,14 @@ RUN export EVTXARCH=$(uname -m | sed 's/arm64/aarch64/') && \
psmisc \
python3-pip \
python3-setuptools \
python3.9 \
python3 \
rsync \
tar \
tini \
unar \
unzip \
xz-utils && \
ln -s -f -r /usr/bin/python3.9 /usr/bin/python3 && \
python3.9 -m pip install --no-compile --no-cache-dir patool entrypoint2 pyunpack python-magic ordered-set supervisor watchdog==6.0.0 && \
python3 -m pip install --no-compile --no-cache-dir --break-system-packages patool entrypoint2 pyunpack python-magic ordered-set supervisor watchdog==6.0.0 && \
curl -fsSL -o /usr/local/bin/supercronic "${SUPERCRONIC_URL}${BINARCH}" && \
chmod +x /usr/local/bin/supercronic && \
curl -fsSL -o /usr/local/bin/yq "${YQ_URL}${BINARCH}" && \
Expand Down
2 changes: 1 addition & 1 deletion Dockerfiles/logstash.Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM docker.elastic.co/logstash/logstash-oss:8.15.3
FROM docker.elastic.co/logstash/logstash-oss:8.16.0

LABEL maintainer="[email protected]"
LABEL org.opencontainers.image.authors='[email protected]'
Expand Down
2 changes: 1 addition & 1 deletion Dockerfiles/opensearch.Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM opensearchproject/opensearch:2.17.1
FROM opensearchproject/opensearch:2.18.0

# Copyright (c) 2024 Battelle Energy Alliance, LLC. All rights reserved.
LABEL maintainer="[email protected]"
Expand Down
9 changes: 6 additions & 3 deletions Dockerfiles/zeek.Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,7 @@ RUN export BINARCH=$(uname -m | sed 's/x86_64/amd64/' | sed 's/aarch64/arm64/')
python3-setuptools \
python3-tz \
python3-wheel \
python3-yaml \
python3-zmq \
rsync \
supervisor \
Expand Down Expand Up @@ -139,6 +140,7 @@ RUN export BINARCH=$(uname -m | sed 's/x86_64/amd64/' | sed 's/aarch64/arm64/')
( find "${ZEEK_DIR}"/lib/zeek/plugins/packages -type f -name "*.hlto" -exec chmod 755 "{}" \; || true ) && \
mkdir -p "${ZEEK_DIR}"/share/zeek/site/intel/STIX && \
mkdir -p "${ZEEK_DIR}"/share/zeek/site/intel/MISP && \
mkdir -p "${ZEEK_DIR}"/share/zeek/site/intel/Mandiant && \
mkdir -p "${ZEEK_DIR}"/share/zeek/site/custom && \
touch "${ZEEK_DIR}"/share/zeek/site/intel/__load__.zeek && \
touch "${ZEEK_DIR}"/share/zeek/site/custom/__load__.zeek && \
Expand Down Expand Up @@ -201,8 +203,9 @@ ARG ZEEK_PCAP_PROCESSOR=true
#Whether or not to run "zeek -r XXXXX.pcap local" on each pcap file
ARG ZEEK_AUTO_ANALYZE_PCAP_FILES=false
ARG ZEEK_AUTO_ANALYZE_PCAP_THREADS=1
#Whether or not to refresh intel at various points during processing
ARG ZEEK_INTEL_REFRESH_ON_ENTRYPOINT=false
#Whether or not to do first intel refresh under supervisord
ARG ZEEK_INTEL_REFRESH_ON_STARTUP=false
#Whether or not to do first intel refresh under zeekdeploy.sh
ARG ZEEK_INTEL_REFRESH_ON_DEPLOY=false
ARG ZEEK_INTEL_REFRESH_CRON_EXPRESSION=
ARG ZEEK_INTEL_ITEM_EXPIRATION=-1min
Expand All @@ -225,7 +228,7 @@ ARG PCAP_NODE_NAME=malcolm

ENV AUTO_TAG $AUTO_TAG
ENV ZEEK_PCAP_PROCESSOR $ZEEK_PCAP_PROCESSOR
ENV ZEEK_INTEL_REFRESH_ON_ENTRYPOINT $ZEEK_INTEL_REFRESH_ON_ENTRYPOINT
ENV ZEEK_INTEL_REFRESH_ON_STARTUP $ZEEK_INTEL_REFRESH_ON_STARTUP
ENV ZEEK_INTEL_REFRESH_ON_DEPLOY $ZEEK_INTEL_REFRESH_ON_DEPLOY
ENV ZEEK_INTEL_REFRESH_CRON_EXPRESSION $ZEEK_INTEL_REFRESH_CRON_EXPRESSION
ENV ZEEK_AUTO_ANALYZE_PCAP_FILES $ZEEK_AUTO_ANALYZE_PCAP_FILES
Expand Down
112 changes: 95 additions & 17 deletions api/project/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,15 +169,6 @@
missing_field_map['ip'] = '0.0.0.0'
missing_field_map['long'] = 0

logstash_default_pipelines = [
"malcolm-beats",
"malcolm-enrichment",
"malcolm-input",
"malcolm-output",
"malcolm-suricata",
"malcolm-zeek",
]

urllib3.disable_warnings()
warnings.filterwarnings(
"ignore",
Expand Down Expand Up @@ -937,7 +928,7 @@ def ready():
logstash_lumberjack
true or false, the ready status of Logstash's lumberjack protocol listener
logstash_pipelines
true or false, the ready status of Logstash's default pipelines
true or false, the ready status of Logstash's pipelines
netbox
true or false, the ready status of NetBox
opensearch
Expand Down Expand Up @@ -998,9 +989,9 @@ def ready():
print(f"{type(e).__name__}: {str(e)} getting freq status")

try:
logstashStats = requests.get(f'{logstashUrl}/_node').json()
logstashHealth = requests.get(f'{logstashUrl}/_health_report').json()
except Exception as e:
logstashStats = {}
logstashHealth = {}
if debugApi:
print(f"{type(e).__name__}: {str(e)} getting Logstash node status")

Expand Down Expand Up @@ -1057,11 +1048,8 @@ def ready():
filebeat_tcp=filebeatTcpJsonStatus,
freq=freqStatus,
logstash_lumberjack=logstashLJStatus,
logstash_pipelines=(malcolm_utils.deep_get(logstashStats, ["status"]) == "green")
and all(
pipeline in malcolm_utils.deep_get(logstashStats, ["pipelines"], {})
for pipeline in logstash_default_pipelines
),
logstash_pipelines=(malcolm_utils.deep_get(logstashHealth, ["status"]) == "green")
and (malcolm_utils.deep_get(logstashHealth, ["indicators", "pipelines", "status"]) == "green"),
netbox=bool(
isinstance(netboxStatus, dict)
and netboxStatus
Expand All @@ -1074,6 +1062,96 @@ def ready():
)


@app.route(
f"{('/' + app.config['MALCOLM_API_PREFIX']) if app.config['MALCOLM_API_PREFIX'] else ''}/dashboard-export/<dashid>",
methods=['GET', 'POST'],
)
def dashboard_export(dashid):
"""Uses the opensearch dashboards API to export a dashboard. Also handles the _REPLACER strings
as described in "Adding new visualizations and dashboards" at
https://idaholab.github.io/Malcolm/docs/contributing-dashboards.html#DashboardsNewViz
Parameters
----------
dashid : string
the ID of the dashboard to export
request : Request
Uses 'replace' from requests arguments, true (default) or false; indicates whether or not to do
MALCOLM_NETWORK_INDEX_PATTERN_REPLACER, MALCOLM_NETWORK_INDEX_TIME_FIELD_REPLACER,
MALCOLM_OTHER_INDEX_PATTERN_REPLACER
Returns
-------
content
The JSON of the exported dashboard
"""

args = get_request_arguments(request)
try:
# call the API to get the dashboard JSON
response = requests.get(
f"{dashboardsUrl}/api/{'kibana' if (databaseMode == malcolm_utils.DatabaseMode.ElasticsearchRemote) else 'opensearch-dashboards'}/dashboards/export",
params={
'dashboard': dashid,
},
auth=opensearchReqHttpAuth,
verify=opensearchSslVerify,
)
response.raise_for_status()

if doReplacers := malcolm_utils.str2bool(args.get('replace', 'true')):
# replace references to index pattern names with the _REPLACER strings, which will allow other Malcolm
# instances that use different index pattern names to import them and substitute their own names
replacements = {
app.config['MALCOLM_NETWORK_INDEX_PATTERN']: 'MALCOLM_NETWORK_INDEX_PATTERN_REPLACER',
app.config['MALCOLM_NETWORK_INDEX_TIME_FIELD']: 'MALCOLM_NETWORK_INDEX_TIME_FIELD_REPLACER',
app.config['MALCOLM_OTHER_INDEX_PATTERN']: 'MALCOLM_OTHER_INDEX_PATTERN_REPLACER',
}
pattern = re.compile('|'.join(re.escape(key) for key in replacements))
responseText = pattern.sub(lambda match: replacements[match.group(0)], response.text)
else:
# ... or just return it as-is
responseText = response.text

    # remove index pattern definitions from the exported dashboard, as they get created programmatically
    # on Malcolm startup and we don't want them to come in with imported dashboards
if responseParsed := malcolm_utils.LoadStrIfJson(responseText):
if 'objects' in responseParsed and isinstance(responseParsed['objects'], list):
responseParsed['objects'] = [
o
for o in responseParsed['objects']
if not (
(o.get("type") == "index-pattern")
and (
o.get("id")
in [
(
"MALCOLM_NETWORK_INDEX_PATTERN_REPLACER"
if doReplacers
else app.config['MALCOLM_NETWORK_INDEX_PATTERN']
),
(
"MALCOLM_OTHER_INDEX_PATTERN_REPLACER"
if doReplacers
else app.config['MALCOLM_OTHER_INDEX_PATTERN']
),
]
)
)
]
return jsonify(responseParsed)

else:
# what we got back from the API wasn't valid JSON, so sad
return jsonify(error=f'Could not process export response for {dashid}')

except Exception as e:
errStr = f"{type(e).__name__}: {str(e)} exporting OpenSearch Dashboard {dashid}"
if debugApi:
print(errStr)
return jsonify(error=errStr)


@app.route(
f"{('/' + app.config['MALCOLM_API_PREFIX']) if app.config['MALCOLM_API_PREFIX'] else ''}/ingest-stats",
methods=['GET'],
Expand Down
4 changes: 2 additions & 2 deletions api/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,6 @@ opensearch-py==2.6.0
requests==2.32.0
regex==2022.3.2
dateparser==1.1.1
elasticsearch==8.15.1
elasticsearch-dsl==8.15.4
elasticsearch==8.16.0
elasticsearch-dsl==8.16.0
psutil==5.9.8
29 changes: 19 additions & 10 deletions arkime/etc/config.ini

Large diffs are not rendered by default.

23 changes: 14 additions & 9 deletions arkime/wise/source.zeeklogs.js
Original file line number Diff line number Diff line change
Expand Up @@ -1666,19 +1666,10 @@ class MalcolmSource extends WISESource {
"zeek.http.user_agent",
"zeek.http.version",
"zeek.intel.cif_confidence",
"zeek.intel.cif_description",
"zeek.intel.cif_firstseen",
"zeek.intel.cif_lastseen",
"zeek.intel.cif_source",
"zeek.intel.cif_tags",
"zeek.intel.file_description",
"zeek.intel.file_mime_type",
"zeek.intel.matched",
"zeek.intel.seen_indicator",
"zeek.intel.seen_indicator_type",
"zeek.intel.seen_node",
"zeek.intel.seen_where",
"zeek.intel.sources",
"zeek.ipsec.certificates",
"zeek.ipsec.doi",
"zeek.ipsec.exchange_type",
Expand Down Expand Up @@ -2304,6 +2295,20 @@ class MalcolmSource extends WISESource {
"zeek.opcua_binary_variant_metadata.variant_data_array_dim",
"zeek.opcua_binary_variant_metadata.variant_data_source",
"zeek.opcua_binary_variant_metadata.variant_data_source_str",
"zeek.opcua_binary_write.node_id_encoding_mask",
"zeek.opcua_binary_write.node_id_namespace_idx",
"zeek.opcua_binary_write.node_id_numeric",
"zeek.opcua_binary_write.node_id_string",
"zeek.opcua_binary_write.node_id_guid",
"zeek.opcua_binary_write.node_id_opaque",
"zeek.opcua_binary_write.attribute_id",
"zeek.opcua_binary_write.attribute_id_str",
"zeek.opcua_binary_write.index_range",
"zeek.opcua_binary_write.data_value_encoding_mask",
"zeek.opcua_binary_write.source_timestamp",
"zeek.opcua_binary_write.source_pico_sec",
"zeek.opcua_binary_write.server_timestamp",
"zeek.opcua_binary_write.server_pico_sec",
"zeek.ospf.advert_router",
"zeek.ospf.area_id",
"zeek.ospf.backup_router",
Expand Down
2 changes: 1 addition & 1 deletion config/logstash.env.example
Original file line number Diff line number Diff line change
Expand Up @@ -14,4 +14,4 @@ LOGSTASH_NETBOX_ENRICHMENT_DATASETS=suricata.alert,zeek.conn,zeek.dhcp,zeek.dns,
# Zeek log types that will be ignored (dropped) by LogStash
LOGSTASH_ZEEK_IGNORED_LOGS=analyzer,broker,cluster,config,loaded_scripts,packet_filter,png,print,prof,reporter,stderr,stdout
# Logstash memory allowance and other Java options
LS_JAVA_OPTS=-server -Xmx2500m -Xms2500m -Xss1536k -XX:-HeapDumpOnOutOfMemoryError -Djava.security.egd=file:/dev/./urandom -Dlog4j.formatMsgNoLookups=true
LS_JAVA_OPTS=-server -Xmx2500m -Xms2500m -Xss1536k -XX:-HeapDumpOnOutOfMemoryError -Djava.security.egd=file:/dev/./urandom -Dlog4j.formatMsgNoLookups=true -Dlogstash.pipelinebus.implementation=v1
6 changes: 3 additions & 3 deletions config/zeek-offline.env.example
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,9 @@ ZEEK_ROTATED_PCAP=true

ZEEK_PCAP_PROCESSOR=true

# Specifies whether or not to refresh Zeek Intelligence Framework files in
# the container entrypoint
ZEEK_INTEL_REFRESH_ON_ENTRYPOINT=true
# Specifies whether or not to refresh Zeek Intelligence Framework files
# as soon as the container starts up
ZEEK_INTEL_REFRESH_ON_STARTUP=true
# Specifies a cron expression indicating the refresh interval for generating the
# Zeek Intelligence Framework files (or blank to disable automatic refresh)
ZEEK_INTEL_REFRESH_CRON_EXPRESSION=
4 changes: 2 additions & 2 deletions config/zeek.env.example
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,11 @@ ZEEK_LOCAL_NETS=
ZEEK_JSON=
# Specifies the value for Zeek's Intel::item_expiration timeout (-1min to disable)
ZEEK_INTEL_ITEM_EXPIRATION=-1min
# When querying a TAXII or MISP feed, only process threat indicators that have
# When querying a threat intelligence feed, only process threat indicators that have
# been created or modified since the time represented by this value;
# it may be either a fixed date/time (01/01/2021) or relative interval (30 days ago)
ZEEK_INTEL_FEED_SINCE=
# Whether or not to require SSL certificate verification when querying a TAXII or MISP feed
# Whether or not to require SSL certificate verification when querying an intelligence feed
ZEEK_INTEL_FEED_SSL_CERTIFICATE_VERIFICATION=false
# Number of threads to use for querying feeds for generating Zeek Intelligence Framework files
ZEEK_INTEL_REFRESH_THREADS=2
Expand Down
Loading

0 comments on commit 2383599

Please sign in to comment.