Revision 2nd of November
parent 54d902aba1
commit 47a7b7d803
@@ -1,12 +1,33 @@
from pathlib import Path
import json
import yaml
import jsonschema
from typing import Any
import requests
try:
    from yachalk import chalk
    yachalk_imported = True
except ModuleNotFoundError:
    yachalk_imported = False

dataset_path = Path('dataset')
dataset_info = dataset_path / 'dataset.json'
token = "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"

def error(msg: str):
    if yachalk_imported:
        msg = chalk.red(msg)
    else:
        msg = "Error: {}".format(msg)
    print(msg)

def warning(msg: str):
    if yachalk_imported:
        msg = chalk.yellow(msg)
    else:
        msg = "Warning: {}".format(msg)
    print(msg)

def open_dataset() -> dict[str, Any]:
    with open(dataset_info, 'r') as f:
        return json.load(f)

@@ -21,10 +42,10 @@ def get_json(uri: str):
     print(resp)
     if not resp.ok:
         try:
-            error = resp.json()['message']
+            resp_error = resp.json()['message']
         except Exception:
-            error = resp.text
-        raise Exception(f"Invalid response: {error}")
+            resp_error = resp.text
+        raise Exception(f"Invalid response: {resp_error}")
     return resp.json()
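
# Note (added for clarity, not part of the commit): the old local name shadowed the
# error() helper defined above; renaming it to resp_error avoids that collision.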

def get_repo(slug: str):

@@ -39,6 +60,55 @@ def get_file(slug: str, path: str):
def plural(amount: int, name: str, plural: str = 's'):
    return f"{amount} {name}{plural[:amount^1]}"
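
# Illustrative note (not part of the commit): plural() drops the suffix only when
# amount == 1, because 1 ^ 1 == 0 makes the slice empty, while any other amount
# keeps it:
#   plural(1, "star") -> "1 star"
#   plural(0, "fork") -> "0 forks"
#   plural(3, "fork") -> "3 forks"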

from typing import NamedTuple

class Artifact(NamedTuple):
    file: str
    lines: list[int]

class SecurityRule(NamedTuple):
    status: str
    argument: str
    artifacts: None | list[Artifact]

rule_schema = yaml.safe_load("""type: object
additionalProperties: no
required:
- status
- argument
properties:
  status:
    type: string
    enum:
    - disregarded
    - not applicable
    - unknown
  argument:
    type: string
  artifacts:
    type: array
    items:
      type: object
      properties:
        file:
          type: string
        lines:
          type: array
          items:
            type: integer""")

def check_security_rules(security_rules: dict[Any, Any]) -> dict[int, SecurityRule]:
    for n in range(1, 19):
        try:
            rule = security_rules.get(n, None)
            if rule is None: raise Exception('No result for rule {}'.format(n))
            jsonschema.validate(rule, rule_schema)
        except jsonschema.ValidationError as e:
            error("Security rule {n}: {msg} at $.{n}.{path}".format(n=n, msg=e.message, path=e.json_path))
            warning("Not checking further rules!")
            break
    return security_rules
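
# Illustration (not part of the commit): a security_rules.yaml entry of the shape
# check_security_rules() expects -- an integer rule number mapping to an object that
# validates against rule_schema above. The rule number, argument text, file path, and
# line numbers below are invented for the example.
#
#   3:
#     status: unknown
#     argument: The model does not show how calls between services are authenticated.
#     artifacts:
#     - file: gateway/src/main/resources/application.yml
#       lines: [12, 17]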

update_dataset = False

def get_name(slug: str):

@@ -70,6 +140,14 @@ def write_model_readmes(dataset: dict[str, Any]):
        info['forks'] = forks
        info['owner_name'] = owner_name
        info['owner_slug'] = owner_slug
        security_rules_file = dir / 'security_rules.yaml'
        try:
            with open(security_rules_file, 'r') as f:
                security_rules = yaml.safe_load(f)
            security_rules = check_security_rules(security_rules)
        except FileNotFoundError:
            warning("Security rules file not found at {}".format(security_rules_file))
            security_rules = {}
        print(f"Writing readme file {readme}")
        with open(readme, 'w', encoding="utf-8") as f:
            f.write(f"""# {slug}

@@ -118,11 +196,22 @@ This repository contains of 17 manually created dataflow diagrams (DFDs) of micr

## Models

<div class="datatable-begin"></div>

Name | Source | LoC | Stars | Forks | DFD Items | Technologies
-- | -- | -- | -- | -- | -- | --
{chr(10).join(f"[{info['slug']}](dataset/{model_id}/README.md) | [GitHub](https://github.com/{info['slug']}) | {info['l']} | {info['stars']} | {info['forks']} | {info['t']} | {len(info['tech'])}" for model_id, info in dataset.items())}

<div class="datatable-end"></div>

## DFD Items

Do culpa deserunt est excepteur amet. Non pariatur ea elit ad eiusmod veniam exercitation nulla. Commodo do adipisicing amet et. Voluptate laboris commodo dolor eu mollit ipsum. Amet reprehenderit velit eu culpa amet exercitation. Elit esse ullamco duis mollit quis. Eiusmod qui reprehenderit sunt cupidatat Lorem anim occaecat enim sint eiusmod tempor.

## Use-Cases

Veniam culpa nostrud id laborum deserunt consectetur consectetur voluptate. Sint aute cupidatat velit irure elit laboris anim labore esse labore. Quis ullamco ut consequat amet. Enim sit laboris deserunt veniam duis aliqua irure proident.
""")
        for model_id, info in dataset.items():
            f.write(f"[{info['slug']}](dataset/{model_id}/README.md) | [GitHub](https://github.com/{info['slug']}) | {info['l']} | {info['stars']} | {info['forks']} | {info['t']} | {len(info['tech'])}\n")

def main():
    dataset = open_dataset()