Merge branch 'master' into Hall-sensor-tune
.github/workflows/push.yml (vendored): 31 lines changed
@@ -42,6 +42,15 @@ jobs:
          source/Hexfile/LICENSE_RELEASE.md
          if-no-files-found: error

      - name: Generate json index file
        run: cd source && python3 metadata.py ${{ matrix.model }}.json

      - name: Archive ${{ matrix.model }} index file
        uses: actions/upload-artifact@v2
        with:
          name: metadata
          path: source/Hexfile/${{ matrix.model }}.json

  build_multi-lang:
    runs-on: ubuntu-20.04
    container:
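The two steps added above invoke metadata.py after the firmware build and upload the resulting JSON index alongside the hex files. As a rough sketch of what that step amounts to (the model name "Pinecil" stands in for `${{ matrix.model }}` and is an assumption for illustration):

```python
# Editorial sketch of the CI step; the model name "Pinecil" is illustrative.
import json
import subprocess

# Equivalent of: cd source && python3 metadata.py Pinecil.json
subprocess.run(["python3", "metadata.py", "Pinecil.json"], cwd="source", check=True)

# The index is written next to the hex files and uploaded as the "metadata" artifact.
with open("source/Hexfile/Pinecil.json") as f:
    index = json.load(f)
print(index["release"], index["model"], sorted(index["contents"]))
```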
@@ -80,6 +89,15 @@ jobs:
          source/Hexfile/LICENSE_RELEASE.md
          if-no-files-found: error

      - name: Generate json index file
        run: cd source && python3 metadata.py ${{ matrix.model }}.json

      - name: Archive ${{ matrix.model }} index file
        uses: actions/upload-artifact@v2
        with:
          name: metadata
          path: source/Hexfile/${{ matrix.model }}.json

  tests:
    runs-on: ubuntu-20.04
    container:
@@ -131,3 +149,16 @@ jobs:
      - name: Check python with flake8
        run: flake8 Translations

  shellcheck:
    name: runner / shellcheck
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v1
      - name: shellcheck
        uses: reviewdog/action-shellcheck@v1
        with:
          github_token: ${{ secrets.github_token }}
          reporter: github-pr-review # Change reporter.
          exclude: "./.git/*" # Optional.
          check_all_files_with_shebangs: "false" # Optional.
.github/workflows/shellcheck.yml (vendored): 15 lines removed
@@ -1,15 +0,0 @@
name: Shell Check
on: [pull_request]
jobs:
  shellcheck:
    name: runner / shellcheck
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v1
      - name: shellcheck
        uses: reviewdog/action-shellcheck@v1
        with:
          github_token: ${{ secrets.github_token }}
          reporter: github-pr-review # Change reporter.
          exclude: "./.git/*" # Optional.
          check_all_files_with_shebangs: "false" # Optional.
@@ -71,9 +71,16 @@ This may change during power up as the sources are negotiated in turn.
- **DC** input (dumb)
- **QC** input (QC2/QC3 negotiation is used for the current supply)
- **PD** input (the PD subsystem is used to negotiate for the current supply)
- **PD W. VBus** input (the PD subsystem is used to negotiate for the current supply), and VBus **is** connected to your input power source
- **PD No VBus** input (the PD subsystem is used to negotiate for the current supply), and VBus is **NOT** connected to your input power source

### Max

This indicates the maximum temperature in °C to which the system estimates it can reliably measure the tip.
This depends on a few factors, including the handle temperature, so it can move around during use.

### Hall

This will appear if your device is capable of having a magnetic Hall effect sensor fitted.
It shows the current field strength reading from the sensor, which can be used to check that the sensor is operational and to measure the strength of the magnetic field for diagnostics.
setup.sh: 38 lines removed
@@ -1,38 +0,0 @@
#!/bin/sh
set -e
# Setup shell file to set up the environment on an Ubuntu machine
sudo apt-get update && sudo apt-get install -y make bzip2 git python3 python3-pip wget dfu-util
python3 -m pip install bdflib black flake8
sudo mkdir -p /build
cd /build

# Download source files to cache folder
# Remember: if these are updated, you need to update the corresponding md5 file
# Github checks out under $GITHUB_WORKSPACE
MDPATH=${GITHUB_WORKSPACE:-/build/source/}
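# (editorial note) ${GITHUB_WORKSPACE:-/build/source/} is POSIX default expansion:
# it uses $GITHUB_WORKSPACE when set (as in CI) and falls back to /build/source/ otherwise.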
sudo mkdir -p /build/cache
cd /build/cache/
if md5sum -c "$MDPATH"/ci/gcc-arm-none-eabi-10-2020-q4-major-x86_64-linux.tar.bz2.md5; then
    echo "Good MD5 ARM"
else
    echo "ARM MD5 Mismatch, downloading fresh"
    rm -rf /build/cache/gcc-arm*.bz2 || true
    sudo wget -q "https://developer.arm.com/-/media/Files/downloads/gnu-rm/10-2020q4/gcc-arm-none-eabi-10-2020-q4-major-x86_64-linux.tar.bz2" -O gcc-arm-none-eabi-10-2020-q4-major-x86_64-linux.tar.bz2
fi

if md5sum -c "$MDPATH"/ci/nuclei_riscv_newlibc_prebuilt_linux64_2020.08.tar.bz2.md5; then
    echo "Good MD5 RISCV"
else
    echo "RISCV MD5 Mismatch, downloading fresh"
    rm -rf /build/cache/nuclei*.bz2 || true
    sudo wget -q "https://github.com/Ralim/nuclei-compiler/releases/download/2020.08/nuclei_riscv_newlibc_prebuilt_linux64_2020.08.tar.bz2" -O nuclei_riscv_newlibc_prebuilt_linux64_2020.08.tar.bz2
fi

echo "Extracting compilers"
sudo tar -xj -f gcc-arm-none-eabi-10-2020-q4-major-x86_64-linux.tar.bz2 -C /build/
sudo tar -xj -f nuclei_riscv_newlibc_prebuilt_linux64_2020.08.tar.bz2 -C /build/

echo "Link into PATH"

sudo ln -s /build/gcc-arm-none-eabi-10-2020-q4-major/bin/* /usr/local/bin
sudo ln -s /build/gcc/bin/* /usr/local/bin
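For reference, the script's toolchain caching boils down to "verify a checksum against the cache, refetch on mismatch". A minimal Python sketch of the same pattern (the `verify_md5` and `fetch` names are hypothetical, not part of the repository):

```python
# Editorial sketch of the verify-or-refetch cache pattern used by setup.sh.
import hashlib
import urllib.request
from pathlib import Path


def verify_md5(path: Path, expected_md5: str) -> bool:
    # True when the cached file exists and matches the expected hex digest.
    return path.exists() and hashlib.md5(path.read_bytes()).hexdigest() == expected_md5


def fetch(url: str, dest: Path, expected_md5: str) -> None:
    # Download only when the cached copy is missing or corrupt.
    if not verify_md5(dest, expected_md5):
        urllib.request.urlretrieve(url, str(dest))
```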
source/metadata.py (executable file): 89 lines added
@@ -0,0 +1,89 @@
#!/usr/bin/env python3

import json
from pathlib import Path
import os
import re
import subprocess
import sys

# Creates an index metadata json file of the hexfiles folder
# This is used by automation like the Pinecil updater


if len(sys.argv) != 2:
    print("Requires the output json name as an arg")
    sys.exit(1)

HERE = Path(__file__).resolve().parent

HexFileFolder = os.path.join(HERE, "Hexfile")
OutputJSONPath = os.path.join(HexFileFolder, sys.argv[1])
TranslationsFilesPath = os.path.join(HERE.parent, "Translations")


def load_json(filename: str, skip_first_line: bool):
    with open(filename) as f:
        if skip_first_line:
            f.readline()
        return json.loads(f.read())


def read_git_tag():
    return subprocess.check_output(["git", "rev-parse", "--short=7", "HEAD"]).strip().decode("ascii").upper()
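# (editorial example) read_git_tag() turns e.g. b"abc1234\n" from git into "ABC1234";
# the hash value here is illustrative, not from this commit.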


def read_version():
    with open(HERE / "version.h") as version_file:
        for line in version_file:
            if re.findall(r"^.*(?<=(#define)).*(?<=(BUILD_VERSION))", line):
                matches = re.findall(r"\"(.+?)\"", line)
                if matches:
                    return matches[0]
    raise Exception("Could not parse version")
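# (editorial example) read_version() expects a line in version.h shaped like:
#   #define BUILD_VERSION "2.15"
# and would return "2.15" for it; the version string is illustrative.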


# Fetch our file listings
translation_files = [
    os.path.join(TranslationsFilesPath, f)
    for f in os.listdir(TranslationsFilesPath)
    if os.path.isfile(os.path.join(TranslationsFilesPath, f)) and f.endswith(".json")
]
output_files = [
    os.path.join(HexFileFolder, f)
    for f in os.listdir(HexFileFolder)
    if os.path.isfile(os.path.join(HexFileFolder, f))
]

parsed_languages = {}
for path in translation_files:
    lang: dict = load_json(path, skip_first_line=False)
    code = lang.get("languageCode", None)
    if code is not None:
        parsed_languages[code] = lang

# Now that we have the languages, we can generate our index of info on each file

output_json = {"git_tag": read_git_tag(), "release": read_version(), "contents": {}}

device_model_name = None
for file_path in output_files:
    if file_path.endswith(".hex") or file_path.endswith(".dfu"):
        # Find out what language this file is
        name: str = os.path.basename(file_path)
        matches = re.findall(r"^([a-zA-Z0-9]+)_(.+)\.(.+)$", name)
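        # (editorial example) for a name like "Pinecil_EN.hex" the pattern yields
        # [("Pinecil", "EN", "hex")]: model prefix, language code, extension.
        # The filename is illustrative.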
        if matches:
            matches = matches[0]
            if len(matches) == 3:
                if device_model_name is None:
                    device_model_name = matches[0]
                lang_code: str = matches[1]
                lang_file = parsed_languages.get(lang_code, None)
                if lang_file is None and lang_code.startswith("multi_"):
                    # Multi-language files won't match a translation definition,
                    # so derive the display name from the filename instead
                    lang_file = {"languageLocalName": lang_code.replace("multi_", "").replace("compressed_", "")}
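                    # (editorial example) a code like "multi_compressed_European"
                    # becomes the display name "European"; the name is illustrative.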
                if lang_file is None:
                    raise Exception(f"Could not match language code {lang_code}")
                file_record = {"language_code": lang_code, "language_name": lang_file.get("languageLocalName", None)}
                output_json["contents"][name] = file_record
        else:
            print(f"failed to parse {name}")

if device_model_name is None:
    raise Exception("No files parsed")

output_json["model"] = device_model_name
with open(OutputJSONPath, "w", encoding="utf8") as json_file:
    json.dump(output_json, json_file, ensure_ascii=False)
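Putting the script together: it emits one JSON object per model, keyed by firmware filename. A hand-written sketch of the index's shape, with every value illustrative rather than taken from a real build:

```python
# Illustrative shape of the emitted index; all values are made up.
example_index = {
    "git_tag": "ABC1234",   # short HEAD hash, uppercased
    "release": "2.15",      # BUILD_VERSION parsed out of version.h
    "model": "Pinecil",     # filename prefix shared by the hex/dfu files
    "contents": {
        "Pinecil_EN.hex": {"language_code": "EN", "language_name": "English"},
        "Pinecil_multi_compressed_European.hex": {
            "language_code": "multi_compressed_European",
            "language_name": "European",
        },
    },
}
```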