Merge main into new-feature/mcus/49
mergebotmikroe[bot] authored Dec 18, 2024
2 parents b7e7423 + d3efc61 commit f5d6a21
Showing 11 changed files with 188 additions and 24 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/MCUsReleaseLive.yaml
@@ -153,7 +153,7 @@ jobs:
ES_INDEX_LIVE: ${{ secrets.ES_INDEX_LIVE }}
run: |
echo "Indexing to Live."
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_LIVE }} "False" ${{ github.event.inputs.release_version }} "False" "False"
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_LIVE }} ${{ secrets.PROG_DEBUG_CODEGRIP }} "False" ${{ github.event.inputs.release_version }} "False" "False"
- name: Send notification to Mattermost
if: ${{ github.event.inputs.notify_channel == 'true' }}
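
The call above gains a fourth positional argument, the PROG_DEBUG_CODEGRIP secret, between the Elasticsearch index name and the force flag; the same change repeats in every workflow below. A minimal sketch of the positional order scripts/index.py now expects, with placeholder values and showing only the positionals visible in this diff:

import argparse

# Sketch of the updated positional order; values are placeholders, not real secrets.
parser = argparse.ArgumentParser(description="index.py positional order (sketch)")
parser.add_argument("repo")             # e.g. "owner/repo"
parser.add_argument("token")            # GitHub token
parser.add_argument("select_index")     # Elasticsearch index name
parser.add_argument("doc_codegrip")     # new: CODEGRIP spreadsheet/CSV download link
parser.add_argument("force_index")
parser.add_argument("release_version")
parser.add_argument("update_database")

args = parser.parse_args([
    "owner/repo", "<token>", "<es-index>", "<codegrip-doc-link>",
    "False", "latest", "False",
])
print(args.doc_codegrip)                # -> <codegrip-doc-link>
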
2 changes: 1 addition & 1 deletion .github/workflows/MCUsReleaseTest.yaml
@@ -104,7 +104,7 @@ jobs:
ES_INDEX_LIVE: ${{ secrets.ES_INDEX_LIVE }}
run: |
echo "Indexing to Test."
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_TEST }} "False" ${{ github.event.inputs.release_version }} "False" "False"
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_TEST }} ${{ secrets.PROG_DEBUG_CODEGRIP }} "False" ${{ github.event.inputs.release_version }} "False" "False"
- name: Notify Mattermost - Test ready
env:
4 changes: 2 additions & 2 deletions .github/workflows/checkIndexes.yaml
@@ -13,7 +13,7 @@ on:
regex:
type: string
description: Regex to use when searching for indexed items
default: "arm_gcc_clang|arm_mikroc|clocks|database|dspic|^images$|mikroe_utils|avr|pic|preinit|riscv|schemas|unit_test_lib|.+[device|tool]_support$"
default: "arm_gcc_clang|arm_mikroc|clocks|database|dspic|^images$|mikroe_utils|avr|pic|preinit|riscv|schemas|unit_test_lib|.+[device|tool]_support$|^codegrip_pack"
fix:
type: boolean
description: Fix the broken links with new ones?
@@ -31,7 +31,7 @@ on:
- cron: "0/30 7-16 * * 1-5" # Every 30 minutes, between 07:00 AM and 04:59 PM, Monday through Friday

env:
GLOBAL_REGEX: "arm_gcc_clang|arm_mikroc|clocks|database|dspic|^images$|mikroe_utils|avr|pic|preinit|riscv|schemas|unit_test_lib|.+[device|tool]_support$"
GLOBAL_REGEX: "arm_gcc_clang|arm_mikroc|clocks|database|dspic|^images$|mikroe_utils|avr|pic|preinit|riscv|schemas|unit_test_lib|.+[device|tool]_support$|^codegrip_pack"

jobs:
manual_run:
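
Both the workflow input default and GLOBAL_REGEX gain a ^codegrip_pack alternative, so the index checker now also visits CODEGRIP pack entries. A quick sketch with hypothetical index names; only the first two match:

import re

GLOBAL_REGEX = "arm_gcc_clang|arm_mikroc|clocks|database|dspic|^images$|mikroe_utils|avr|pic|preinit|riscv|schemas|unit_test_lib|.+[device|tool]_support$|^codegrip_pack"

for name in ("codegrip_pack_arm", "images", "some_unrelated_asset"):
    print(name, bool(re.search(GLOBAL_REGEX, name)))  # True, True, False
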
4 changes: 2 additions & 2 deletions .github/workflows/index.yaml
@@ -52,13 +52,13 @@ jobs:
run: |
if [[ ${{ github.event.inputs.select_index }} == "Live" ]]; then
echo "Indexing to Live."
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_LIVE }} ${{ github.event.inputs.force_index }} ${{ github.event.inputs.release_version }} False ${{ github.event.inputs.set_as_latest }}
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_LIVE }} ${{ secrets.PROG_DEBUG_CODEGRIP }} ${{ github.event.inputs.force_index }} ${{ github.event.inputs.release_version }} False ${{ github.event.inputs.set_as_latest }}
else
echo "Indexing to Test."
if [[ ${{ github.event.inputs.set_as_latest }} ]]; then
echo "Promote to latest requested, but ignored. Only available for LIVE updates."
fi
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_TEST }} ${{ github.event.inputs.force_index }} ${{ github.event.inputs.release_version }} False False
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_TEST }} ${{ secrets.PROG_DEBUG_CODEGRIP }} ${{ github.event.inputs.force_index }} ${{ github.event.inputs.release_version }} False False
fi
- name: Send notification to Mattermost
4 changes: 2 additions & 2 deletions .github/workflows/updateDb.yaml
@@ -65,6 +65,6 @@ jobs:
ES_PASSWORD: ${{ secrets.ES_PASSWORD }}
run: |
echo "Indexing database to TEST."
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_TEST }} False ${{ github.event.inputs.release_version }} True False
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_TEST }} ${{ secrets.PROG_DEBUG_CODEGRIP }} False ${{ github.event.inputs.release_version }} True False
echo "Indexing database to LIVE."
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_LIVE }} False ${{ github.event.inputs.release_version }} True False
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_LIVE }} ${{ secrets.PROG_DEBUG_CODEGRIP }} False ${{ github.event.inputs.release_version }} True False
4 changes: 2 additions & 2 deletions .github/workflows/updateDbDevices.yaml
@@ -92,7 +92,7 @@ jobs:
ES_PASSWORD: ${{ secrets.ES_PASSWORD }}
run: |
echo "Indexing database to TEST."
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_TEST }} "True" "latest" "True" "False"
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_TEST }} ${{ secrets.PROG_DEBUG_CODEGRIP }} "True" "latest" "True" "False"
Update_Devices_in_Live_Database:
if: ${{ github.event.inputs.select_index == 'Live' }}
@@ -151,4 +151,4 @@ jobs:
ES_PASSWORD: ${{ secrets.ES_PASSWORD }}
run: |
echo "Indexing database to LIVE."
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_LIVE }} "True" "latest" "True" "False"
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_LIVE }} ${{ secrets.PROG_DEBUG_CODEGRIP }} "True" "latest" "True" "False"
4 changes: 2 additions & 2 deletions .github/workflows/updateDbFromSdk.yaml
@@ -79,9 +79,9 @@ jobs:
run: |
if [[ ${{ steps.mikrosdk_payload.outputs.index }} == "Test" ]]; then
echo "Indexing database to TEST."
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_TEST }} "True" "latest" "True" "False"
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_TEST }} ${{ secrets.PROG_DEBUG_CODEGRIP }} "True" "latest" "True" "False"
fi
if [[ ${{ steps.mikrosdk_payload.outputs.index }} == "Live" ]]; then
echo "Indexing database to LIVE."
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_LIVE }} "True" "latest" "True" "False"
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_LIVE }} ${{ secrets.PROG_DEBUG_CODEGRIP }} "True" "latest" "True" "False"
fi
2 changes: 1 addition & 1 deletion scripts/check_indexes.py
@@ -73,7 +73,7 @@ def str2bool(v):
print("%sWARNING: Asset \"%s\" has no \"gh_package_name\" in the index." % (es_instance.Colors.WARNING, indexed_item['source']['name']))
else: ## code 200 - success, no need to reindex
if args.index_package_names:
if 'packs.download.microchip.com' in indexed_item['source']['download_link']:
if 'packs.download.microchip.com' in indexed_item['source']['download_link'] or 'amazonaws' in indexed_item['source']['download_link']:
package_name = indexed_item['source']['name']
else:
package_name = (json.loads(asset_status.text))['name']
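
With the extra amazonaws check, the checker keeps the already-indexed package name for assets hosted on S3 as well as on Microchip's pack server, and only falls back to the GitHub asset name otherwise. A minimal sketch of that branch; the helper name and URLs are illustrative, not part of the script:

def resolve_package_name(indexed_name, download_link, gh_asset_name):
    # Keep the indexed name for Microchip pack server or S3-hosted downloads;
    # otherwise use the name reported by the GitHub asset lookup.
    if 'packs.download.microchip.com' in download_link or 'amazonaws' in download_link:
        return indexed_name
    return gh_asset_name

print(resolve_package_name("clocks", "https://bucket.s3.amazonaws.com/clocks.7z", "clocks.7z"))                        # clocks
print(resolve_package_name("clocks", "https://github.com/owner/repo/releases/download/v1.0.0/clocks.7z", "clocks.7z"))  # clocks.7z
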
40 changes: 39 additions & 1 deletion scripts/index.py
@@ -1,10 +1,11 @@
import os, re, time, argparse, requests, hashlib, shutil
import os, re, time, argparse, requests, hashlib, shutil, json
from elasticsearch import Elasticsearch
from pathlib import Path
from datetime import datetime, timezone

import support as support
import read_microchip_index as MCHP
import read_codegrip_index as CODEGRIP

# Gets latest release headers from repository
def get_headers(api, token):
@@ -480,6 +481,41 @@ def index_microchip_packs(es: Elasticsearch, index_name: str):
resp = es.index(index=index_name, doc_type='necto_package', id=eachItem['name'], body=eachItem)
print(f"{resp["result"]} {resp['_id']}")

def index_codegrip_packs(es: Elasticsearch, index_name, doc_codegrip):
package_items = CODEGRIP.convert_item_to_json(doc_codegrip, True)

# Get the current time in UTC
current_time = datetime.now(timezone.utc).replace(microsecond=0)
# If you specifically want the 'Z' at the end instead of the offset
published_at = current_time.isoformat().replace('+00:00', 'Z')

for package in package_items:
new_version, package_updated = CODEGRIP.get_version(es, index_name, package_items[package]['package_name'], package_items[package]['mcus'])
major_new, minor_new, patch_new = map(int, new_version.split('.'))
major_csv, minor_csv, patch_csv = map(int, package_items[package]['package_version'].split('.'))
if (major_new, minor_new, patch_new) < (major_csv, minor_csv, patch_csv):
new_version = package_items[package]['package_version']
package_updated = True
if package_updated:
doc = {
"name": package_items[package]['package_name'],
"display_name": package_items[package]['display_name'],
"author": "MikroElektronika",
"hidden": False,
"type": "programmer_dfp",
"version": new_version,
"package_version": package_items[package]['package_version'],
"published_at": published_at,
"category": "CODEGRIP Device Pack",
"download_link": package_items[package]['download_link'],
"package_changed": True,
"install_location": package_items[package]['install_location'],
"dependencies": json.loads(package_items[package]['dependencies']),
"mcus": package_items[package]['mcus']
}
resp = es.index(index=index_name, doc_type='necto_package', id=package_items[package]['package_name'], body=doc)
print(f"{resp["result"]} {resp['_id']}")

if __name__ == '__main__':
# First, check for arguments passed
def str2bool(v):
@@ -497,6 +533,7 @@ def str2bool(v):
parser.add_argument("repo", help="Repository name, e.g., 'username/repo'")
parser.add_argument("token", help="GitHub Token")
parser.add_argument("select_index", help="Provided index name")
parser.add_argument('doc_codegrip', type=str, help='Spreadsheet table download link.')
parser.add_argument("force_index", help="If true will update packages even if hash is the same", type=str2bool)
parser.add_argument("release_version", help="Selected release version to index to current database", type=str)
parser.add_argument("update_database", help="If true will update database.7z", type=str2bool)
@@ -527,6 +564,7 @@ def str2bool(v):
if 'live' not in args.select_index:
# TODO - uncomment once LIVE test is confirmed to work
index_microchip_packs(es, args.select_index)
index_codegrip_packs(es, args.select_index, args.doc_codegrip)

# Now index the new release
index_release_to_elasticsearch(
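
index_codegrip_packs() keeps whichever version is higher: the one derived from the index (auto-incremented when new MCUs appear) or the package_version from the CSV. A minimal sketch of that comparison using tuple ordering on 'X.Y.Z' strings; versions here are illustrative:

def is_newer(candidate: str, current: str) -> bool:
    # True if candidate is a strictly higher semantic version than current.
    return tuple(map(int, candidate.split('.'))) > tuple(map(int, current.split('.')))

print(is_newer("1.3.0", "1.2.9"))  # True  -> CSV version wins and the package is marked as updated
print(is_newer("1.9.9", "2.0.0"))  # False -> keep the version computed from the index
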
109 changes: 109 additions & 0 deletions scripts/read_codegrip_index.py
@@ -0,0 +1,109 @@
import os, shutil
import support as support
from elasticsearch import Elasticsearch

def increment_version(version):
major, minor, patch = map(int, version.split('.'))
return f"{major}.{minor}.{patch + 1}"

def get_version(es: Elasticsearch, index_name, asset, mcu_list):
# Search query to use
query_search = {
"size": 5000,
"query": {
"match_all": {}
}
}

# Search the base with provided query
search_es_name = asset
num_of_retries = 1
while num_of_retries <= 10:
try:
response = es.search(index=index_name, body=query_search)
if not response['timed_out']:
break
except Exception:
print("Executing search query - retry number %i" % num_of_retries)
num_of_retries += 1

indexed_version = None
for eachHit in response['hits']['hits']:
if not 'name' in eachHit['_source']:
continue
name = eachHit['_source']['name']
if name == search_es_name:
if 'version' in eachHit['_source']:
indexed_version = eachHit['_source']['version']
indexed_mcus = eachHit['_source']['mcus']

new_version = indexed_version

updated = False
if new_version:
for mcu in mcu_list:
if mcu not in indexed_mcus:
updated = True
if indexed_version.startswith('v'):
new_version = f'v{increment_version(indexed_version[1:])}'
else:
new_version = increment_version(indexed_version)
break
else:
new_version = '0.0.1'

return new_version, updated

def convert_item_to_json(docLink, saveToFile=False):
import urllib.request

with urllib.request.urlopen(docLink) as f:
html = f.read().decode('utf-8')
with open(os.path.join(os.path.dirname(__file__), 'devices.txt'), 'w') as devices:
devices.write(html)

import pandas as pd
import numpy as np

# Read the CSV file
df = pd.read_csv(os.path.join(os.path.dirname(__file__), "devices.txt"))

# Replace NaN with False for all other columns except 'package_name'
df.replace({np.nan: False}, inplace=True)

# Drop rows where `package_name` is NaN or invalid
df = df[df['package_name'] != False]

# Group by `package_name` and restructure data
grouped_programmer_data = {}

for package_name, group in df.groupby("package_name"):
# Get the first row of the group to extract common package details
package_details = group.iloc[0][[
"vendor",
"programmers",
"debuggers",
"category",
"package_name",
"package_version",
"display_name",
"install_location",
"download_link",
"dependencies",
]].to_dict()

# Add the list of MCU names under the key "mcus"
package_details["mcus"] = group["name"].tolist()

# Store the result by package_name
grouped_programmer_data[package_name] = package_details
if os.path.exists(os.path.join(os.path.dirname(__file__), "devices.txt")):
os.remove(os.path.join(os.path.dirname(__file__), "devices.txt"))

if saveToFile:
import json
with open(os.path.join(os.path.dirname(__file__), 'devices.json'), 'w') as json_file:
json_file.write(json.dumps(grouped_programmer_data, indent=4))

return grouped_programmer_data
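
convert_item_to_json() downloads the CSV, drops rows without a package_name, and regroups the flat device rows into one record per package with the device names collected under mcus. A self-contained sketch of that regrouping, with made-up rows standing in for the downloaded spreadsheet:

import pandas as pd

df = pd.DataFrame({
    "package_name": ["codegrip_pack_a", "codegrip_pack_a", "codegrip_pack_b"],
    "name": ["MCU_ONE", "MCU_TWO", "MCU_THREE"],
    "package_version": ["1.0.0", "1.0.0", "2.1.0"],
})

grouped = {}
for package_name, group in df.groupby("package_name"):
    details = group.iloc[0][["package_version"]].to_dict()  # shared package fields from the first row
    details["mcus"] = group["name"].tolist()                # every device name in the pack
    grouped[package_name] = details

print(grouped)
# {'codegrip_pack_a': {'package_version': '1.0.0', 'mcus': ['MCU_ONE', 'MCU_TWO']},
#  'codegrip_pack_b': {'package_version': '2.1.0', 'mcus': ['MCU_THREE']}}
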