Captures uncommitted work that lived in the working tree on
v2-mxgw-integration but was orthogonal to the migration. Stashed
during the v2-mxgw merge to master (2026-04-30) and replanted here on
a feature branch off master so it's git-visible rather than living in
the stash list.
Two distinct buckets:
1. Tracked fixture/config refinements (10 files, ~36 lines):
- scripts/e2e/test-opcuaclient.ps1
- src/ZB.MOM.WW.OtOpcUa.Admin/appsettings.json
- 5 docker-compose.yml under tests/.../IntegrationTests/Docker/
(AbCip, Modbus, OpcUaClient, S7)
- 4 fixture .cs files (AbServerFixture, ModbusSimulatorFixture,
OpcPlcFixture, Snap7ServerFixture)
2. Untracked driver-gaps queue artifacts (~8000 lines):
- docs/plans/{abcip,ablegacy,focas,opcuaclient,s7,twincat}-plan.md
— per-driver gap plans
- docs/featuregaps.md — cross-cutting analysis
- docs/v2/focas-deployment.md, docs/v2/implementation/focas-simulator-plan.md
- followup.md — auto/driver-gaps queue follow-ups
- scripts/queue/ — PR-queue automation tooling (12 files including
pr-manifest.yaml at 1473 lines)
This commit is a snapshot for recoverability — review and split into
focused PRs (or discard) before merging anywhere downstream.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
123 lines
4.5 KiB
Bash
#!/usr/bin/env bash
#
# Reads scripts/queue/pr-manifest.yaml and creates one Gitea issue per PR.
# Idempotent: skips PRs whose canonical id already exists as an open issue.

set -euo pipefail

# Resolve the script's own directory so lib.sh is found regardless of CWD.
HERE="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# lib.sh is expected to provide MANIFEST, LABEL_MAP and the api_repo helper
# used below — TODO confirm against scripts/queue/lib.sh.
. "$HERE/lib.sh"

if ! [ -f "$MANIFEST" ]; then
    echo "manifest not found: $MANIFEST" >&2
    exit 1
fi
# NOTE(review): an earlier revision pre-fetched all existing issues here in
# bash (an EXISTING_JSON accumulator paged via api_repo), but the result was
# never used — it was neither exported nor referenced again, and the heredoc
# below is quoted ('PY') so the Python step cannot expand it. The Python step
# re-fetches and parses the same issue pages itself, so the redundant API
# paging has been removed.
python - "$MANIFEST" "$LABEL_MAP" <<'PY'
# Inline filing step: reads the manifest and label map passed as argv and
# talks to the Gitea API directly.
import json
import os
import re
import sys
import urllib.request

import yaml

# argv[1] = pr-manifest.yaml, argv[2] = label-name -> label-id JSON map
manifest_arg, labels_arg = sys.argv[1:3]

gitea_token = os.environ["GITEA_TOKEN"]  # required; KeyError if unset
api_base = "https://gitea.dohertylan.com/api/v1/repos/dohertj2/lmxopcua"

with open(manifest_arg) as fh:
    manifest = yaml.safe_load(fh)
with open(labels_arg) as fh:
    lmap = json.load(fh)
def api(method, path, data=None):
    """Call the Gitea repo API and return the decoded JSON response.

    method/path are the HTTP verb and the repo-relative endpoint; data,
    when truthy, is JSON-encoded and sent as the request body.
    """
    payload = json.dumps(data).encode() if data else None
    headers = {
        "Authorization": f"token {gitea_token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }
    request = urllib.request.Request(
        f"{api_base}/{path}",
        method=method,
        headers=headers,
        data=payload,
    )
    with urllib.request.urlopen(request) as resp:
        return json.loads(resp.read().decode())
# Collect existing issues' canonical ids -> issue# so re-runs are idempotent.
# Each auto-managed issue embeds an HTML comment of the form
#   <!-- queue-meta {...json...} -->
# whose "id" field is the canonical PR id.
# Compiled once here instead of per-issue inside the loop.
QUEUE_META_RE = re.compile(r'<!-- queue-meta\s*(\{.*?\})\s*-->', re.S)

existing = {}
page = 1
while True:
    items = api("GET", f"issues?state=all&type=issues&limit=50&page={page}")
    if not items:
        break
    for it in items:
        m = QUEUE_META_RE.search(it.get("body", "") or "")
        if not m:
            continue
        try:
            meta = json.loads(m.group(1))
        except json.JSONDecodeError:
            # Malformed queue-meta block: skip this issue rather than abort.
            # (Was a bare `except: pass`, which also swallowed KeyboardInterrupt.)
            continue
        if "id" in meta:
            existing[meta["id"]] = it["number"]
    page += 1

print(f"existing queue issues: {len(existing)}")
filed = 0
|
|
skipped = 0
|
|
for pr in manifest["prs"]:
|
|
if pr["id"] in existing:
|
|
skipped += 1
|
|
continue
|
|
title = f"[{pr['driver']}] {pr['title']}"
|
|
meta = {
|
|
"id": pr["id"],
|
|
"driver": pr["driver"],
|
|
"phase": pr["phase"],
|
|
"plan_pr_id": pr.get("plan_pr_id",""),
|
|
"deps": pr.get("deps", []),
|
|
"cross_driver": pr.get("cross_driver", False),
|
|
}
|
|
body_parts = [
|
|
f"<!-- queue-meta\n{json.dumps(meta)}\n-->",
|
|
"## Auto-managed PR — Mode B (autonomous)",
|
|
f"**Driver**: `{pr['driver']}` **Phase**: `{pr['phase']}` **Plan PR**: `{pr.get('plan_pr_id','')}`",
|
|
f"**Plan**: [`{pr.get('plan_anchor','docs/plans/' + pr['driver'] + '-plan.md')}`]({pr.get('plan_anchor','../docs/plans/' + pr['driver'] + '-plan.md')})",
|
|
f"**Effort**: `{pr.get('effort','M')}` **Cross-driver**: `{pr.get('cross_driver', False)}`",
|
|
"",
|
|
"## Summary",
|
|
pr.get("summary","_(see plan)_"),
|
|
]
|
|
if pr.get("files"):
|
|
body_parts += ["", "## Source files", *[f"- `{f}`" for f in pr["files"]]]
|
|
if pr.get("docs"):
|
|
body_parts += ["", "## Docs", *[f"- `{d}`" for d in pr["docs"]]]
|
|
if pr.get("fixture"):
|
|
body_parts += ["", "## Fixture", *[f"- `{x}`" for x in pr["fixture"]]]
|
|
if pr.get("e2e"):
|
|
body_parts += ["", "## E2E", *[f"- `{x}`" for x in pr["e2e"]]]
|
|
if pr.get("deps"):
|
|
body_parts += ["", "## Depends on", *[f"- canonical: `{d}`" for d in pr["deps"]]]
|
|
if pr.get("notes"):
|
|
body_parts += ["", "## Notes", pr["notes"]]
|
|
body_parts += ["",
|
|
"---",
|
|
f"_Branch: `auto/{pr['driver']}/{pr.get('plan_pr_id','').replace('/','-')}`. Target: `auto/driver-gaps`._"]
|
|
body = "\n".join(body_parts)
|
|
|
|
label_names = [
|
|
f"driver/{pr['driver']}",
|
|
f"phase/{pr['phase']}",
|
|
"queue/queued",
|
|
"auto-managed",
|
|
]
|
|
if pr.get("cross_driver"): label_names.append("cross-driver")
|
|
label_ids = [lmap[n] for n in label_names if n in lmap]
|
|
issue = api("POST", "issues", {"title": title, "body": body, "labels": label_ids})
|
|
print(f" filed #{issue['number']}: {pr['id']}")
|
|
filed += 1
|
|
|
|
print(f"\nfiled {filed}, skipped (existing) {skipped}")
|
|
PY
|