From 347524887bdbd49b3b4847b9af078426bd803bab Mon Sep 17 00:00:00 2001 From: Patrick Ogenstad Date: Thu, 14 May 2026 07:08:06 +0200 Subject: [PATCH] Fix documentation formatting --- dev/check-integration-bug-report.md | 117 ++++++++++++ dev/type-safety-report.md | 175 ++++++++++++++++++ docs/docs/infrahubctl/infrahubctl-graphql.mdx | 2 +- .../docs/infrahubctl/infrahubctl-validate.mdx | 4 +- .../sdk_ref/infrahub_sdk/client.mdx | 54 +++--- .../sdk_ref/infrahub_sdk/node/node.mdx | 2 +- .../infrahub_sdk/node/relationship.mdx | 2 +- .../mdx/mdx_collapsed_overload_section.py | 6 +- infrahub_sdk/analyzer.py | 4 +- infrahub_sdk/checks.py | 3 +- infrahub_sdk/client.py | 64 ++++--- infrahub_sdk/config.py | 1 + infrahub_sdk/convert_object_type.py | 2 + infrahub_sdk/ctl/graphql.py | 2 +- infrahub_sdk/ctl/parameters.py | 2 +- infrahub_sdk/ctl/validate.py | 4 +- infrahub_sdk/exceptions.py | 4 +- infrahub_sdk/generator.py | 6 +- infrahub_sdk/graphql/utils.py | 1 + infrahub_sdk/node/attribute.py | 10 +- infrahub_sdk/node/metadata.py | 12 +- infrahub_sdk/node/node.py | 38 ++-- infrahub_sdk/node/property.py | 6 +- infrahub_sdk/node/related_node.py | 40 ++-- infrahub_sdk/node/relationship.py | 44 +++-- infrahub_sdk/operation.py | 6 +- infrahub_sdk/protocols_generator/generator.py | 4 +- infrahub_sdk/pytest_plugin/items/base.py | 2 +- infrahub_sdk/query_groups.py | 9 +- infrahub_sdk/recorder.py | 4 +- infrahub_sdk/schema/__init__.py | 10 +- infrahub_sdk/schema/main.py | 9 +- infrahub_sdk/spec/object.py | 8 +- .../spec/processors/data_processor.py | 4 +- infrahub_sdk/spec/processors/factory.py | 2 +- .../spec/processors/range_expand_processor.py | 2 +- infrahub_sdk/spec/range_expansion.py | 6 +- infrahub_sdk/task/manager.py | 12 +- infrahub_sdk/transforms.py | 2 +- infrahub_sdk/types.py | 6 +- infrahub_sdk/utils.py | 17 +- infrahub_sdk/uuidt.py | 7 +- pyproject.toml | 4 - tasks.py | 2 +- tests/unit/ctl/test_render_app.py | 2 +- tests/unit/ctl/test_schema_app.py | 2 +- 
tests/unit/ctl/test_transform_app.py | 4 +- .../test_command_output_method.py | 5 +- .../content_gen_methods/test_jinja2_method.py | 5 +- tests/unit/sdk/conftest.py | 8 +- .../unit/sdk/pool/test_attribute_from_pool.py | 5 +- tests/unit/sdk/test_client.py | 4 +- tests/unit/sdk/test_config.py | 10 +- tests/unit/sdk/test_node.py | 8 +- tests/unit/sdk/test_schema.py | 2 +- tests/unit/sdk/test_topological_sort.py | 14 +- 56 files changed, 568 insertions(+), 222 deletions(-) create mode 100644 dev/check-integration-bug-report.md create mode 100644 dev/type-safety-report.md diff --git a/dev/check-integration-bug-report.md b/dev/check-integration-bug-report.md new file mode 100644 index 00000000..5bb3b272 --- /dev/null +++ b/dev/check-integration-bug-report.md @@ -0,0 +1,117 @@ +# Bug: `InfrahubCheckIntegrationItem.runtest` crashes with `AttributeError` + +A pre-existing bug in the pytest plugin's `check-integration` test kind, discovered while +adding test coverage for the `pytest.Stash` migration in PR #985. Not introduced by that PR +— `git log` confirms the same defect existed before the migration. + +## Symptom + +Any YAML test using `kind: check-integration` fails at runtime with: + +```text +AttributeError: 'InfrahubCheckIntegrationItem' object has no attribute 'check_instance' +``` + +The check never executes; the GraphQL query is never issued. 
+ +## Location + +[infrahub_sdk/pytest_plugin/items/check.py:85-94](../infrahub_sdk/pytest_plugin/items/check.py#L85-L94) + +```python +class InfrahubCheckIntegrationItem(InfrahubCheckItem): + def runtest(self) -> None: + input_data = self.session.stash[INFRAHUB_CLIENT_KEY].query_gql_query( + self.check_instance.query, # <-- AttributeError here + variables=self.test.spec.get_variables_data(), + ) + passed = self.run_check(input_data) + + if not passed and self.test.expect == InfrahubTestExpectedResult.PASS: + raise CheckResultError(name=self.name) +``` + +## Root cause + +`self.check_instance` is declared as a type annotation in +[InfrahubCheckItem.__init__](../infrahub_sdk/pytest_plugin/items/check.py#L34) but never +assigned a value: + +```python +class InfrahubCheckItem(InfrahubItem): + def __init__(self, ...) -> None: + super().__init__(...) + self.check_instance: InfrahubCheck # annotation only, no assignment +``` + +The attribute is only created later, inside +[instantiate_check](../infrahub_sdk/pytest_plugin/items/check.py#L36-L44): + +```python +def instantiate_check(self) -> None: + ... + self.check_instance = check_class() +``` + +`InfrahubCheckSmokeItem.runtest` calls `self.instantiate_check()` as its first line, and +`InfrahubCheckUnitProcessItem.runtest` calls it indirectly via `self.run_check()` — +both compensate. `InfrahubCheckIntegrationItem.runtest` is the only path that reads +`self.check_instance.query` *before* either call, so it always crashes. + +`run_check` does call `instantiate_check`, but only after the failing line on 88. + +## Why it has gone unnoticed + +- No unit test covers `InfrahubCheckIntegrationItem.runtest`. Coverage of + [check.py](../infrahub_sdk/pytest_plugin/items/check.py) is 0% in + `tests/unit/pytest_plugin/`. 
+- Integration test kinds require a running Infrahub instance, so users hit a real + GraphQL endpoint to exercise this — the AttributeError surfaces before any HTTP call, + but the failure mode is easy to misdiagnose as "my test setup is wrong." +- Smoke and unit-process kinds for the same resource work fine, masking the issue. + +## Suggested fix + +Call `instantiate_check()` first, mirroring `InfrahubCheckSmokeItem.runtest`: + +```python +class InfrahubCheckIntegrationItem(InfrahubCheckItem): + def runtest(self) -> None: + self.instantiate_check() + input_data = self.session.stash[INFRAHUB_CLIENT_KEY].query_gql_query( + self.check_instance.query, + variables=self.test.spec.get_variables_data(), + ) + passed = self.run_check(input_data) + + if not passed and self.test.expect == InfrahubTestExpectedResult.PASS: + raise CheckResultError(name=self.name) +``` + +`run_check` will call `instantiate_check` a second time, which is wasted work but not +incorrect — it just re-imports the check class and rebuilds the instance. If that matters, +also drop the redundant call inside `run_check` and require all callers to instantiate +explicitly. The other items (`InfrahubPythonTransformIntegrationItem`) already follow that +pattern: they call `self.instantiate_transform()` at the top of `runtest`. + +## Reproducing + +A draft `test_check_integration` was written and removed from +[tests/unit/pytest_plugin/test_plugin.py](../tests/unit/pytest_plugin/test_plugin.py) +because there is no way to make it pass without the fix above. The reproduction shape is: + +1. Create a YAML test with `kind: check-integration` and a `variables: {}` spec. +2. Create an `infrahub_config.yml` that points to a check class file. +3. Create a check class with `query = "..."` and a `validate(data)` method. +4. Run `pytest --infrahub-repo-config=infrahub_config.yml`. + +Expected: 1 passed. Actual: 1 failed with the AttributeError above. 
+ +Once the fix lands, the test can be added back; the test scaffolding is straightforward +because `httpx_mock` from the outer test scope intercepts the inner `pytester`-driven +client (pytester runs in-process by default). + +## Scope + +This bug is out of scope for PR #985 (stash migration is a pure refactor). It should be +fixed in a follow-up PR along with the test that covers the path. diff --git a/dev/type-safety-report.md b/dev/type-safety-report.md new file mode 100644 index 00000000..320f0029 --- /dev/null +++ b/dev/type-safety-report.md @@ -0,0 +1,175 @@ +# Type Safety Audit + +A snapshot of where the Infrahub Python SDK currently bypasses its type checkers (mypy, ty) and a roadmap toward zero violations — both in [pyproject.toml](../pyproject.toml) and inline. + +## Goals + +1. No file-level overrides in [pyproject.toml](../pyproject.toml) for mypy or ty. +2. No `# type: ignore[...]` (or `# ty: ignore[...]`) comments anywhere in the source. +3. **No use of `typing.cast`.** Casts hide real type problems instead of fixing them; the underlying disagreement should be resolved at the source (narrowing, generics, protocol design). + +The project already enforces strict mypy globally — `disallow_untyped_defs = true` in [pyproject.toml:267](../pyproject.toml#L267) — so the gap is closing. What remains is a finite, enumerable list of escape hatches. 
+ +## Headline numbers + +| Surface | Count | Where | +|---|---|---| +| `# type: ignore[...]` in `infrahub_sdk/` | 67 | 16 files | +| `# type: ignore[...]` in `tests/` | 38 | 7 files | +| `cast(...)` calls (production code) | **3** | [infrahub_sdk/file_handler.py](../infrahub_sdk/file_handler.py) | +| `cast(...)` imports (test code) | 2 | tests (only one is meaningful — the other test imports unused) | +| `# ty: ignore` | 0 | — | +| `# noqa` | 36 | mostly ARG/S-rules, not type-related | +| ty per-file rule overrides in pyproject.toml | 12 blocks | covers 22+ source/test paths | +| mypy per-module overrides in pyproject.toml | 2 blocks | `infrahub_sdk.ctl.check`, `infrahub_sdk.utils` | + +## Why each ignore exists — categorised + +### 1. `cast()` in `infrahub_sdk/file_handler.py` — must go + +```python +# infrahub_sdk/file_handler.py:61, 64, 95 +return PreparedFile(file_object=cast("BinaryIO", file_obj), ...) +``` + +**Root cause:** `prepare_upload()` accepts `bytes | Path | BinaryIO | None`. After narrowing away `bytes`, `Path`, and `None`, the remaining branch is `BinaryIO` — but the checker can't narrow on negative `isinstance` chains for `BinaryIO` (it's a `typing.BinaryIO` alias for `IO[bytes]`, which isn't runtime-checkable). + +**Fix without `cast`:** restructure the function to dispatch on positive `isinstance` checks and never let control fall through to an "implicit BinaryIO" branch. Either: + +- Make the BinaryIO branch explicit with `isinstance(content, io.IOBase)` (or a `Protocol` with `read`/`close`) so narrowing succeeds, or +- Use `@overload` with a narrowed input type for the BinaryIO callers and validate at the boundary. + +The `anyio.to_thread.run_sync(content.open, "rb")` line is a real type-checker limitation (the return type isn't generic enough); this is fixable by pinning the return type with a small typed wrapper rather than `cast`. + +### 2. 
`union-attr` — schema fields typed as optional unions (25 ignores in src) + +By far the largest cluster. Examples: + +- [infrahub_sdk/node/node.py:202, 206, 209](../infrahub_sdk/node/node.py#L202): `self._schema.inherit_from` — `_schema` is typed as a union of schema kinds, not all of which expose `inherit_from`. +- [infrahub_sdk/node/related_node.py:249, 252, 255](../infrahub_sdk/node/related_node.py#L249): returning `self._peer` where `_peer` is initialised to `None`. +- [infrahub_sdk/node/node.py:847](../infrahub_sdk/node/node.py#L847): `self._schema.hierarchy` — only `NodeSchema` has `hierarchy`, but `_schema` allows `GenericSchema | NodeSchema | ProfileSchema | TemplateSchema`. +- [infrahub_sdk/query_groups.py:177, 271](../infrahub_sdk/query_groups.py#L177): `existing_group.members.peer_ids` — `members` is `RelationshipManager | None`. + +**Pattern to apply:** + +- Replace `# type: ignore[union-attr]` with explicit guards (`if isinstance(self._schema, NodeSchema):`) or with method dispatch on the schema variant (a `match` statement, or polymorphism on the schema hierarchy itself). +- For `_peer` returns, use the walrus operator or assign to a local first: `if (peer := self._peer) is not None: return peer`. The narrowing then survives the return. +- For [node.py:202-209](../infrahub_sdk/node/node.py#L202): introduce `is_ip_prefix()` etc. as methods on the schema variants themselves (Tell-Don't-Ask), then `self._schema.is_ip_prefix()` is total. + +### 3. `attr-defined` on pytest `Session` — workaround for pytest's API (18 ignores) + +Concentrated in [infrahub_sdk/pytest_plugin/](../infrahub_sdk/pytest_plugin/) — every `session.infrahub_*` access. `pytest.Session` doesn't allow user-defined attributes through its type stubs. + +**Fix:** define a `Protocol` (or `TypedDict` accessed via `session.stash`) for the Infrahub-specific session state. 
`pytest.Stash` ([pytest docs](https://docs.pytest.org/en/stable/reference/reference.html#stash)) is the typed, sanctioned mechanism for plugins to attach data to sessions/items. Migration path: + +```python +infrahub_client_key = pytest.StashKey[InfrahubClientSync]() +session.stash[infrahub_client_key] = infrahub_client # was: session.infrahub_client = ... +client = item.session.stash[infrahub_client_key] # was: self.session.infrahub_client +``` + +This eliminates ~30 `attr-defined` and `union-attr` ignores in [infrahub_sdk/pytest_plugin/](../infrahub_sdk/pytest_plugin/) at once. + +### 4. `return-value` — protocols vs concrete types (9 ignores in src) + +Examples in [infrahub_sdk/node/related_node.py](../infrahub_sdk/node/related_node.py) and [infrahub_sdk/node/relationship.py](../infrahub_sdk/node/relationship.py): a method declared to return `InfrahubNode` actually returns `CoreNodeBase | InfrahubNode | InfrahubNodeSync` from the store. + +**Fix:** make `NodeStore.get` generic on the kind parameter (`def get[T: CoreNode](self, ..., kind: type[T]) -> T`). Python 3.12+ syntax is supported (project requires `>=3.10`), so use the equivalent `TypeVar` pattern. This is the right fix for [infrahub_sdk/store.py](../infrahub_sdk/store.py) anyway — `_get` already has `# type: ignore[no-untyped-def]` because it lacks a return annotation. + +### 5. `arg-type` — pydantic / setattr (5 ignores in src) + +- [infrahub_sdk/node/attribute.py:95, 99](../infrahub_sdk/node/attribute.py#L95): `setattr(self, prop_name, NodeProperty(data=data.get(prop_name)))` — `data.get` returns `Any | None` and `NodeProperty.__init__` requires `dict`. + - **Fix:** narrow before constructing: `if (prop_data := data.get(prop_name)) is not None and isinstance(prop_data, dict): ...` +- [infrahub_sdk/transfer/importer/json.py:161](../infrahub_sdk/transfer/importer/json.py#L161): passing `add_relationships` as `task=` to a batch — the batch's task signature is over-narrow. 
+ - **Fix:** widen the batch task `Callable` typing. +- [infrahub_sdk/client.py:1674](../infrahub_sdk/client.py#L1674): `node=branch` where `BranchData` doesn't satisfy the expected node protocol. Likely a true API mismatch worth investigating. + +### 6. `type-abstract` (2 ignores) + +[infrahub_sdk/testing/repository.py:101](../infrahub_sdk/testing/repository.py#L101) and [infrahub_sdk/ctl/branch.py:300](../infrahub_sdk/ctl/branch.py#L300): passing `kind=CoreGenericRepository` (an abstract protocol) where a concrete type is expected. The API expects a concrete `type[T]`; passing an abstract class is intentional here. + +**Fix:** the `kind` parameter should accept `type[T]` where `T` may itself be a Protocol — currently it's typed as a non-abstract concrete type. Loosen with `type[T] | type[Protocol[T]]` or convert to `TypeVar` bound to the generic base. + +### 7. `typeddict-item`, `assignment`, `no-untyped-def`, `annotation-unchecked` — small / one-off + +- [infrahub_sdk/ctl/cli_commands.py:242-243](../infrahub_sdk/ctl/cli_commands.py#L242): GraphQL error responses typed as a TypedDict that doesn't include `'message'` / `'locations'` — fix the TypedDict, or use the actual exception type. +- [infrahub_sdk/store.py:227](../infrahub_sdk/store.py#L227): `_get` lacks return annotation — easy fix once the generic refactor in §4 lands. +- [infrahub_sdk/pytest_plugin/loader.py:74, 81](../infrahub_sdk/pytest_plugin/loader.py#L74): `ITEMS_MAPPING[test.spec.kind]` returns `type[pytest.Item]` but the actual value is `type[InfrahubItem]` (covariant). Type the mapping itself as `dict[str, type[InfrahubItem]]` and remove the local annotation. + +### 8. `# type: ignore[union-attr]` in tests on `node.contract_start.value` etc. + +[tests/unit/sdk/test_file_object.py](../tests/unit/sdk/test_file_object.py) — generated/dynamic node attributes. Tests use the runtime attribute model that can't be statically known. 
Once the conftest fixtures are typed against `protocols.py`, these can use the typed protocol classes and the ignores fall away. + +## ty per-file overrides — required cleanup + +ty overrides in [pyproject.toml:139-258](../pyproject.toml#L139-L258) are well-documented inline ("Fix these incrementally by addressing violations and removing the override"). Status by block: + +| Block | Lines | Violations | Difficulty | +|---|---|---|---| +| `infrahub_sdk/checks.py` | [140-143](../pyproject.toml#L140) | 1 (`invalid-await`) | trivial — single `await` site | +| `infrahub_sdk/file_handler.py`, `infrahub_sdk/utils.py` | [145-149](../pyproject.toml#L145) | 5 total (`unresolved-attribute`) | small | +| `infrahub_sdk/transfer/**` | [151-155](../pyproject.toml#L151) | 3 (`invalid-argument-type`, `invalid-assignment`) | small — co-located in `importer/json.py` | +| `infrahub_sdk/node/node.py` | [158-162](../pyproject.toml#L158) | 9 (`invalid-argument-type` at lines 776, 855, 859, 862) | medium — couples to schema variant fix in §2 | +| `infrahub_sdk/ctl/config.py` | [165-169](../pyproject.toml#L165) | 1 (`unresolved-import`) | leave — `tomli`/`tomllib` Python-version split is necessary | +| `tests/fixtures/**` | [178-183](../pyproject.toml#L178) | many (`invalid-argument-type`, `possibly-missing-attribute`) | low priority — fixtures only | +| `tests/unit/sdk/conftest.py` | [188-192](../pyproject.toml#L188) | **434** (`invalid-argument-type`) | **largest single hotspot** | +| `tests/unit/sdk/test_node.py` (+2) | [194-207](../pyproject.toml#L194) | 97 across 3 files | medium | +| `tests/integration/**` etc. 
| [209-220](../pyproject.toml#L209) | ~120 | medium — depends on protocol-typed clients | +| `tests/unit/sdk/spec/test_object.py` (+2) | [222-233](../pyproject.toml#L222) | 29 | medium | +| `tests/unit/sdk/test_artifact.py` (+8) | [235-251](../pyproject.toml#L235) | 1-5 each | quick wins, file-by-file | +| `docs/**` | [253-262](../pyproject.toml#L253) | (`invalid-assignment`) | leave or delete docs typing? evaluate | + +The conftest issue at [pyproject.toml:188-192](../pyproject.toml#L188) (434 violations) is the single biggest target. Most likely the fixtures dynamically construct nodes whose attribute types ty can't infer — same root cause as §8 above. Fixing the fixture types unlocks both. + +## mypy per-module overrides — small surface + +```toml +# pyproject.toml:269-275 +[[tool.mypy.overrides]] +module = "infrahub_sdk.ctl.check" +disable_error_code = ["call-overload"] + +[[tool.mypy.overrides]] +module = "infrahub_sdk.utils" +disable_error_code = ["arg-type", "attr-defined", "return-value", "union-attr"] +``` + +`infrahub_sdk.utils` has four error codes silenced — this file has historically been an `Any`-heavy junk drawer. Audit for `Any` usage (the codebase has 355 `Any` annotations total — many of those are in [infrahub_sdk/utils.py](../infrahub_sdk/utils.py)) and split into typed sub-modules. + +## Roadmap (recommended order) + +The order minimises rework — each step removes ignores that would otherwise need to be rewritten by later steps. + +1. **Eliminate `cast` from [infrahub_sdk/file_handler.py](../infrahub_sdk/file_handler.py).** Smallest, highest-priority per the stated goal. Switch to positive `isinstance` dispatch + a typed `anyio.to_thread.run_sync` wrapper. +2. 
**Make `NodeStore.get` generic.** Removes 9 `return-value` ignores in [infrahub_sdk/node/related_node.py](../infrahub_sdk/node/related_node.py) and [infrahub_sdk/node/relationship.py](../infrahub_sdk/node/relationship.py), the `no-untyped-def` in [infrahub_sdk/store.py:227](../infrahub_sdk/store.py#L227), and likely several `invalid-argument-type` in tests at the same time. +3. **Migrate pytest plugin to `pytest.Stash`.** Removes ~30 ignores across [infrahub_sdk/pytest_plugin/](../infrahub_sdk/pytest_plugin/) and the entire `attr-defined` cluster. Self-contained refactor. +4. **Resolve schema variant `union-attr` ignores in [infrahub_sdk/node/node.py](../infrahub_sdk/node/node.py).** Push behaviour into the schema classes (or use `match`/`isinstance`). Removes ~13 ignores and the ty `node.py` override. +5. **Type the test fixtures in [tests/unit/sdk/conftest.py](../tests/unit/sdk/conftest.py).** Single biggest ty override (434 violations). Likely cascades into [tests/unit/sdk/test_node.py](../tests/unit/sdk/test_node.py) (97) and the integration overrides. +6. **Audit and re-type [infrahub_sdk/utils.py](../infrahub_sdk/utils.py).** Removes the 4-code mypy override; reduces project-wide `Any` count. +7. **Fix the long-tail individual files** (`checks.py` invalid-await, `cli_commands.py` typeddict-item, etc.) and delete each ty override block as its file goes clean. +8. **Tighten — once all the above are done:** flip `disallow_any_explicit = true` (mypy) and remove `unused-ignore-comment = "ignore"` from ty overrides ([pyproject.toml:133](../pyproject.toml#L133)) so dead ignores are flagged. 
+ +## Easy wins audit (post-investigation) + +A round of verification against ty/mypy showed most "easy wins" were less easy than expected: + +| Candidate | Verdict | Reason | +| --- | --- | --- | +| Type `ITEMS_MAPPING: dict[str, type[InfrahubItem]]` in [pytest_plugin/loader.py](../infrahub_sdk/pytest_plugin/loader.py) | **Applied** | Pure annotation; removes 1 `# type: ignore[assignment]`. | +| Add return type to `NodeStoreBase._get` in [store.py:227](../infrahub_sdk/store.py#L227) | Deferred | The return type would propagate to callers and may surface new mismatches; needs broader review. | +| Remove `# type: ignore[annotation-unchecked]` from [test_node.py](../tests/unit/sdk/test_node.py) (4 sites) | Deferred | Mypy isn't run on `tests/` (see [tasks.py:295](../tasks.py#L295) — `mypy ... infrahub_sdk` only), so the ignores are inert. Cleaning them properly means fixing real `no-redef`/`assignment` errors at those lines, which is a real refactor. | +| Walrus narrowing in [related_node.py:249, 296](../infrahub_sdk/node/related_node.py#L249) | Skipped | Misanalysed: `self._peer` is `CoreNodeBase \| None`, the function returns `InfrahubNode`. The `return-value` ignore is for the `CoreNodeBase` ≠ `InfrahubNode` mismatch, not the optional. Narrowing away `None` doesn't help. | +| Removing any ty override block from pyproject.toml | Skipped | Empirically tested by disabling each block; all currently produce real diagnostics. None are stale. | +| Removing `cast` imports in test files | Skipped | Confirmed both [test_json.py](../tests/unit/ctl/formatters/test_json.py) and [test_allocate.py](../tests/unit/sdk/pool/test_allocate.py) actually use `cast`. | + +**Lesson for future cleanup:** validate each ignore against the actual error code mypy/ty emits before assuming it's stale or trivially fixable. Mismatched ignore codes (e.g., ignoring `[annotation-unchecked]` when the real error is `[assignment]`) are common and obscure the real work needed. 
+ +## Acceptance criteria + +The project is "type-clean" by the user's definition when: + +- `grep -rn "# type: ignore" --include="*.py"` returns 0 lines. +- `grep -rn "cast(" --include="*.py"` returns 0 lines (excluding documentation). +- [pyproject.toml](../pyproject.toml) contains no `[[tool.ty.overrides]]` blocks except the `tomli`/`tomllib` import split (or removed entirely if a different solution is found). +- [pyproject.toml](../pyproject.toml) contains no `[[tool.mypy.overrides]]` blocks with `disable_error_code`. +- `uv run invoke lint` passes with all of the above. diff --git a/docs/docs/infrahubctl/infrahubctl-graphql.mdx b/docs/docs/infrahubctl/infrahubctl-graphql.mdx index 180bd2b8..0796ce8b 100644 --- a/docs/docs/infrahubctl/infrahubctl-graphql.mdx +++ b/docs/docs/infrahubctl/infrahubctl-graphql.mdx @@ -37,7 +37,7 @@ $ infrahubctl graphql export-schema [OPTIONS] ## `infrahubctl graphql generate-return-types` -Create Pydantic Models for GraphQL query return types +Create Pydantic Models for GraphQL query return types. **Usage**: diff --git a/docs/docs/infrahubctl/infrahubctl-validate.mdx b/docs/docs/infrahubctl/infrahubctl-validate.mdx index f96f59f1..450d81b0 100644 --- a/docs/docs/infrahubctl/infrahubctl-validate.mdx +++ b/docs/docs/infrahubctl/infrahubctl-validate.mdx @@ -21,7 +21,7 @@ $ infrahubctl validate [OPTIONS] COMMAND [ARGS]... ## `infrahubctl validate graphql-query` -Validate the format of a GraphQL Query stored locally by executing it on a remote GraphQL endpoint +Validate the format of a GraphQL Query stored locally by executing it on a remote GraphQL endpoint. **Usage**: @@ -44,7 +44,7 @@ $ infrahubctl validate graphql-query [OPTIONS] QUERY [VARIABLES]... ## `infrahubctl validate schema` -Validate the format of a schema file either in JSON or YAML +Validate the format of a schema file either in JSON or YAML. 
**Usage**: diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx index e8e42017..83b9f12c 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx @@ -101,7 +101,7 @@ Return the Infrahub version. get_user(self) -> dict ``` -Return user information +Return user information. #### `get_user_permissions` @@ -109,7 +109,7 @@ Return user information get_user_permissions(self) -> dict ``` -Return user permissions +Return user permissions. #### `count` @@ -140,7 +140,7 @@ all(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeo all(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False, query_name: str | None = None) -> list[InfrahubNode] | list[SchemaType] ``` -Retrieve all nodes of a given kind +Retrieve all nodes of a given kind. **Args:** @@ -219,7 +219,7 @@ Retrieve nodes of a given kind based on provided filters. clone(self, branch: str | None = None) -> InfrahubClient ``` -Return a cloned version of the client using the same configuration +Return a cloned version of the client using the same configuration. #### `execute_graphql` @@ -228,6 +228,7 @@ execute_graphql(self, query: str, variables: dict | None = None, branch_name: st ``` Execute a GraphQL query (or mutation). + If retry_on_failure is True, the query will retry until the server becomes reachable. **Args:** @@ -238,6 +239,10 @@ If retry_on_failure is True, the query will retry until the server becomes reach - `at`: Time when the query should be executed. Defaults to None. 
- `timeout`: Timeout in second for the query. Defaults to None. +**Returns:** + +- The GraphQL data payload (response["data"]). + **Raises:** - `GraphQLError`: When the GraphQL response contains errors. @@ -246,10 +251,6 @@ If retry_on_failure is True, the query will retry until the server becomes reach - `URLNotFoundError`: If the server returns a 404 response. - `Error`: If the response is unexpectedly missing. -**Returns:** - -- The GraphQL data payload (response["data"]). - #### `refresh_login` ```python @@ -405,10 +406,11 @@ repository_update_commit(self, branch_name: str, repository_id: str, commit: str convert_object_type(self, node_id: str, target_kind: str, branch: str | None = None, fields_mapping: dict[str, ConversionFieldInput] | None = None) -> InfrahubNode ``` -Convert a given node to another kind on a given branch. `fields_mapping` keys are target fields names -and its values indicate how to fill in these fields. Any mandatory field not having an equivalent field -in the source kind should be specified in this mapping. See https://docs.infrahub.app/guides/object-conversion -for more information. +Convert a given node to another kind on a given branch. + +`fields_mapping` keys are target fields names and its values indicate how to fill in these fields. +Any mandatory field not having an equivalent field in the source kind should be specified in this +mapping. See https://docs.infrahub.app/guides/object-conversion for more information. ### `InfrahubClientSync` @@ -502,7 +504,7 @@ Return the Infrahub version. get_user(self) -> dict ``` -Return user information +Return user information. #### `get_user_permissions` @@ -510,7 +512,7 @@ Return user information get_user_permissions(self) -> dict ``` -Return user permissions +Return user permissions. 
#### `clone` @@ -518,7 +520,7 @@ Return user permissions clone(self, branch: str | None = None) -> InfrahubClientSync ``` -Return a cloned version of the client using the same configuration +Return a cloned version of the client using the same configuration. #### `execute_graphql` @@ -527,6 +529,7 @@ execute_graphql(self, query: str, variables: dict | None = None, branch_name: st ``` Execute a GraphQL query (or mutation). + If retry_on_failure is True, the query will retry until the server becomes reachable. **Args:** @@ -537,6 +540,10 @@ If retry_on_failure is True, the query will retry until the server becomes reach - `at`: Time when the query should be executed. Defaults to None. - `timeout`: Timeout in second for the query. Defaults to None. +**Returns:** + +- The GraphQL data payload (`response["data"]`). + **Raises:** - `GraphQLError`: When the GraphQL response contains errors. @@ -545,10 +552,6 @@ If retry_on_failure is True, the query will retry until the server becomes reach - `URLNotFoundError`: If the server returns a 404 response. - `Error`: If the response is unexpectedly missing. -**Returns:** - -- The GraphQL data payload (`response["data"]`). - #### `count` ```python @@ -578,7 +581,7 @@ all(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeo all(self, kind: str | type[SchemaTypeSync], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False, query_name: str | None = None) -> list[InfrahubNodeSync] | list[SchemaTypeSync] ``` -Retrieve all nodes of a given kind +Retrieve all nodes of a given kind. 
**Args:** @@ -811,10 +814,11 @@ login(self, refresh: bool = False) -> None convert_object_type(self, node_id: str, target_kind: str, branch: str | None = None, fields_mapping: dict[str, ConversionFieldInput] | None = None) -> InfrahubNodeSync ``` -Convert a given node to another kind on a given branch. `fields_mapping` keys are target fields names -and its values indicate how to fill in these fields. Any mandatory field not having an equivalent field -in the source kind should be specified in this mapping. See https://docs.infrahub.app/guides/object-conversion -for more information. +Convert a given node to another kind on a given branch. + +`fields_mapping` keys are target fields names and its values indicate how to fill in these fields. +Any mandatory field not having an equivalent field in the source kind should be specified in this +mapping. See https://docs.infrahub.app/guides/object-conversion for more information. ### `ProcessRelationsNode` @@ -826,7 +830,7 @@ for more information. ### `BaseClient` -Base class for InfrahubClient and InfrahubClientSync +Base class for InfrahubClient and InfrahubClientSync. **Methods:** diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx index 2523d98e..09036662 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx @@ -626,7 +626,7 @@ caller re-fetches the node first. ### `InfrahubNodeBase` -Base class for InfrahubNode and InfrahubNodeSync +Base class for InfrahubNode and InfrahubNodeSync. 
**Methods:** diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/relationship.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/relationship.mdx index ec21f962..96d33d93 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/relationship.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/relationship.mdx @@ -9,7 +9,7 @@ sidebarTitle: relationship ### `RelationshipManagerBase` -Base class for RelationshipManager and RelationshipManagerSync +Base class for RelationshipManager and RelationshipManagerSync. **Methods:** diff --git a/docs/docs_generation/content_gen_methods/mdx/mdx_collapsed_overload_section.py b/docs/docs_generation/content_gen_methods/mdx/mdx_collapsed_overload_section.py index afb98006..3920af12 100644 --- a/docs/docs_generation/content_gen_methods/mdx/mdx_collapsed_overload_section.py +++ b/docs/docs_generation/content_gen_methods/mdx/mdx_collapsed_overload_section.py @@ -8,10 +8,10 @@ @dataclass class CollapsedOverloadSection(ASection): - """Collapses a group of overloaded method sections into one primary entry - followed by a collapsible ``
`` block with the remaining overloads. + """Collapse a group of overloaded method sections into a primary entry plus a details block. - The *primary* overload is the one with the most parameters (excluding + The remaining overloads are placed inside a collapsible ``
`` block following the + primary entry. The *primary* overload is the one with the most parameters (excluding ``self``). On ties, the first in source order wins. Example:: diff --git a/infrahub_sdk/analyzer.py b/infrahub_sdk/analyzer.py index 1f54c723..89ecc864 100644 --- a/infrahub_sdk/analyzer.py +++ b/infrahub_sdk/analyzer.py @@ -99,12 +99,12 @@ def variables(self) -> list[GraphQLQueryVariable]: return response async def calculate_depth(self) -> int: - """Number of nested levels in the query""" + """Number of nested levels in the query.""" fields = await self.get_fields() return calculate_dict_depth(data=fields) async def calculate_height(self) -> int: - """Total number of fields requested in the query""" + """Total number of fields requested in the query.""" fields = await self.get_fields() return calculate_dict_height(data=fields) diff --git a/infrahub_sdk/checks.py b/infrahub_sdk/checks.py index 0c275db3..36ce1199 100644 --- a/infrahub_sdk/checks.py +++ b/infrahub_sdk/checks.py @@ -147,11 +147,12 @@ def validate(self, data: dict) -> None: """Code to validate the status of this check.""" async def collect_data(self) -> dict: - """Query the result of the GraphQL Query defined in self.query and return the result""" + """Query the result of the GraphQL Query defined in self.query and return the result.""" return await self.client.query_gql_query(name=self.query, branch_name=self.branch_name, variables=self.params) async def run(self, data: dict | None = None) -> bool: """Execute the check after collecting the data from the GraphQL query. + The result of the check is determined based on the presence or not of ERROR log messages. 
""" if not data: diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py index dcedd311..ef03edf9 100644 --- a/infrahub_sdk/client.py +++ b/infrahub_sdk/client.py @@ -118,7 +118,7 @@ def get_kind_as_string(kind: str | type[SchemaType | SchemaTypeSync]) -> str: class BaseClient: - """Base class for InfrahubClient and InfrahubClientSync""" + """Base class for InfrahubClient and InfrahubClientSync.""" def __init__( self, @@ -159,7 +159,7 @@ def __init__( _ = self.config.tls_context # Early load of the TLS context to catch errors def _initialize(self) -> None: - """Sets the properties for each version of the client""" + """Sets the properties for each version of the client.""" def _record(self, response: httpx.Response) -> None: self.config.custom_recorder.record(response) @@ -320,11 +320,11 @@ async def get_version(self) -> str: return response.get("InfrahubInfo", {}).get("version", "") async def get_user(self) -> dict: - """Return user information""" + """Return user information.""" return await self.execute_graphql(query=QUERY_USER) async def get_user_permissions(self) -> dict: - """Return user permissions""" + """Return user permissions.""" user_info = await self.get_user() return get_user_permissions(user_info["AccountProfile"]["member_of_groups"]["edges"]) @@ -700,7 +700,7 @@ async def all( include_metadata: bool = False, query_name: str | None = None, ) -> list[InfrahubNode] | list[SchemaType]: - """Retrieve all nodes of a given kind + """Retrieve all nodes of a given kind. 
Args: kind (str): kind of the nodes to query @@ -932,7 +932,7 @@ async def process_non_batch() -> tuple[list[InfrahubNode], list[InfrahubNode]]: return nodes def clone(self, branch: str | None = None) -> InfrahubClient: - """Return a cloned version of the client using the same configuration""" + """Return a cloned version of the client using the same configuration.""" return InfrahubClient(config=self.config.clone(branch=branch)) async def execute_graphql( @@ -945,6 +945,7 @@ async def execute_graphql( tracker: str | None = None, ) -> dict: """Execute a GraphQL query (or mutation). + If retry_on_failure is True, the query will retry until the server becomes reachable. Args: @@ -954,6 +955,9 @@ async def execute_graphql( at (str, optional): Time when the query should be executed. Defaults to None. timeout (int, optional): Timeout in second for the query. Defaults to None. + Returns: + dict: The GraphQL data payload (response["data"]). + Raises: GraphQLError: When the GraphQL response contains errors. ServerNotReachableError: If the server is not reachable after exhausting retries. @@ -961,9 +965,6 @@ async def execute_graphql( URLNotFoundError: If the server returns a 404 response. Error: If the response is unexpectedly missing. - Returns: - dict: The GraphQL data payload (response["data"]). - """ branch_name = branch_name or self.default_branch url = self._graphql_url(branch_name=branch_name, at=at) @@ -1042,12 +1043,12 @@ async def _execute_graphql_with_file( timeout: Timeout in seconds for the query. tracker: Optional tracker for request tracing. - Raises: - GraphQLError: When the GraphQL response contains errors. - Returns: dict: The GraphQL data payload (response["data"]). + Raises: + GraphQLError: When the GraphQL response contains errors. 
+ """ branch_name = branch_name or self.default_branch url = self._graphql_url(branch_name=branch_name) @@ -1769,10 +1770,11 @@ async def convert_object_type( branch: str | None = None, fields_mapping: dict[str, ConversionFieldInput] | None = None, ) -> InfrahubNode: - """Convert a given node to another kind on a given branch. `fields_mapping` keys are target fields names - and its values indicate how to fill in these fields. Any mandatory field not having an equivalent field - in the source kind should be specified in this mapping. See https://docs.infrahub.app/guides/object-conversion - for more information. + """Convert a given node to another kind on a given branch. + + `fields_mapping` keys are target fields names and its values indicate how to fill in these fields. + Any mandatory field not having an equivalent field in the source kind should be specified in this + mapping. See https://docs.infrahub.app/guides/object-conversion for more information. """ mapping_dict = ( {} @@ -1816,11 +1818,11 @@ def get_version(self) -> str: return response.get("InfrahubInfo", {}).get("version", "") def get_user(self) -> dict: - """Return user information""" + """Return user information.""" return self.execute_graphql(query=QUERY_USER) def get_user_permissions(self) -> dict: - """Return user permissions""" + """Return user permissions.""" user_info = self.get_user() return get_user_permissions(user_info["AccountProfile"]["member_of_groups"]["edges"]) @@ -1866,7 +1868,7 @@ def delete(self, kind: str | type[SchemaTypeSync], id: str, branch: str | None = node.delete() def clone(self, branch: str | None = None) -> InfrahubClientSync: - """Return a cloned version of the client using the same configuration""" + """Return a cloned version of the client using the same configuration.""" return InfrahubClientSync(config=self.config.clone(branch=branch)) def execute_graphql( @@ -1879,6 +1881,7 @@ def execute_graphql( tracker: str | None = None, ) -> dict: """Execute a GraphQL query (or 
mutation). + If retry_on_failure is True, the query will retry until the server becomes reachable. Args: @@ -1888,6 +1891,9 @@ def execute_graphql( at (str, optional): Time when the query should be executed. Defaults to None. timeout (int, optional): Timeout in second for the query. Defaults to None. + Returns: + dict: The GraphQL data payload (`response["data"]`). + Raises: GraphQLError: When the GraphQL response contains errors. ServerNotReachableError: If the server is not reachable after exhausting retries. @@ -1895,9 +1901,6 @@ def execute_graphql( URLNotFoundError: If the server returns a 404 response. Error: If the response is unexpectedly missing. - Returns: - dict: The GraphQL data payload (`response["data"]`). - """ branch_name = branch_name or self.default_branch url = self._graphql_url(branch_name=branch_name, at=at) @@ -1976,12 +1979,12 @@ def _execute_graphql_with_file( timeout: Timeout in seconds for the query. tracker: Optional tracker for request tracing. - Raises: - GraphQLError: When the GraphQL response contains errors. - Returns: dict: The GraphQL data payload (response["data"]). + Raises: + GraphQLError: When the GraphQL response contains errors. + """ branch_name = branch_name or self.default_branch url = self._graphql_url(branch_name=branch_name) @@ -2176,7 +2179,7 @@ def all( include_metadata: bool = False, query_name: str | None = None, ) -> list[InfrahubNodeSync] | list[SchemaTypeSync]: - """Retrieve all nodes of a given kind + """Retrieve all nodes of a given kind. Args: kind (str): kind of the nodes to query @@ -3232,10 +3235,11 @@ def convert_object_type( branch: str | None = None, fields_mapping: dict[str, ConversionFieldInput] | None = None, ) -> InfrahubNodeSync: - """Convert a given node to another kind on a given branch. `fields_mapping` keys are target fields names - and its values indicate how to fill in these fields. Any mandatory field not having an equivalent field - in the source kind should be specified in this mapping. 
See https://docs.infrahub.app/guides/object-conversion - for more information. + """Convert a given node to another kind on a given branch. + + `fields_mapping` keys are target fields names and its values indicate how to fill in these fields. + Any mandatory field not having an equivalent field in the source kind should be specified in this + mapping. See https://docs.infrahub.app/guides/object-conversion for more information. """ mapping_dict = ( {} diff --git a/infrahub_sdk/config.py b/infrahub_sdk/config.py index 87a0f82c..dc9eaa45 100644 --- a/infrahub_sdk/config.py +++ b/infrahub_sdk/config.py @@ -91,6 +91,7 @@ def settings_customise_sources( file_secret_settings: PydanticBaseSettingsSource, ) -> tuple[PydanticBaseSettingsSource, ...]: """Customize settings sources to track which fields were explicitly provided. + This allows us to properly handle authentication method precedence. """ diff --git a/infrahub_sdk/convert_object_type.py b/infrahub_sdk/convert_object_type.py index 3d54ae41..dbfce0c5 100644 --- a/infrahub_sdk/convert_object_type.py +++ b/infrahub_sdk/convert_object_type.py @@ -20,6 +20,7 @@ class ConversionFieldValue(BaseModel): # Only one of these fields can be not None """Holds the new value of the destination field during an object conversion. + Use `attribute_value` to specify the new raw value of an attribute. Use `peer_id` to specify new peer of a cardinality one relationship. Use `peers_ids` to specify new peers of a cardinality many relationship. @@ -41,6 +42,7 @@ def check_only_one_field(self) -> ConversionFieldValue: class ConversionFieldInput(BaseModel): """Indicates how to fill in the value of the destination field during an object conversion. + Use `source_field` to reuse the value of the corresponding field of the object being converted. Use `data` to specify the new value for the field. Use `use_default_value` to set the destination field to its schema default. 
diff --git a/infrahub_sdk/ctl/graphql.py b/infrahub_sdk/ctl/graphql.py index 095e353a..9bef44e8 100644 --- a/infrahub_sdk/ctl/graphql.py +++ b/infrahub_sdk/ctl/graphql.py @@ -121,7 +121,7 @@ async def generate_return_types( schema: Path = typer.Option("schema.graphql", help="Path to the GraphQL schema file."), _: str = CONFIG_PARAM, ) -> None: - """Create Pydantic Models for GraphQL query return types""" + """Create Pydantic Models for GraphQL query return types.""" query = Path.cwd() if query is None else query # Load the GraphQL schema diff --git a/infrahub_sdk/ctl/parameters.py b/infrahub_sdk/ctl/parameters.py index c1b7e5c2..c17f304c 100644 --- a/infrahub_sdk/ctl/parameters.py +++ b/infrahub_sdk/ctl/parameters.py @@ -4,7 +4,7 @@ def load_configuration(value: str) -> str: - """Load the configuration file using default environment variables or from the specified configuration file""" + """Load the configuration file using default environment variables or from the specified configuration file.""" config.SETTINGS.load_and_exit(config_file=value) return value diff --git a/infrahub_sdk/ctl/validate.py b/infrahub_sdk/ctl/validate.py index d8d58a74..cf69e2fa 100644 --- a/infrahub_sdk/ctl/validate.py +++ b/infrahub_sdk/ctl/validate.py @@ -30,7 +30,7 @@ def callback() -> None: @app.command(name="schema") @catch_exception(console=console) async def validate_schema(schema: Path, _: str = CONFIG_PARAM) -> None: - """Validate the format of a schema file either in JSON or YAML""" + """Validate the format of a schema file either in JSON or YAML.""" schema_data = load_yamlfile_from_disk_and_exit(paths=[schema], file_type=SchemaFile, console=console) if not schema_data: console.print(f"[red]Unable to find {schema}") @@ -62,7 +62,7 @@ def validate_graphql( _: str = CONFIG_PARAM, out: str = typer.Option(None, help="Path to a file to save the result."), ) -> None: - """Validate the format of a GraphQL Query stored locally by executing it on a remote GraphQL endpoint""" + 
"""Validate the format of a GraphQL Query stored locally by executing it on a remote GraphQL endpoint.""" try: query_str = find_graphql_query(query) except QueryNotFoundError: diff --git a/infrahub_sdk/exceptions.py b/infrahub_sdk/exceptions.py index b1f3e465..af4237ee 100644 --- a/infrahub_sdk/exceptions.py +++ b/infrahub_sdk/exceptions.py @@ -156,11 +156,11 @@ class FeatureNotSupportedError(Error): class UninitializedError(Error): - """Raised when an object requires an initialization step before use""" + """Raised when an object requires an initialization step before use.""" class InvalidResponseError(Error): - """Raised when an object requires an initialization step before use""" + """Raised when an object requires an initialization step before use.""" class RepositoryFileNotFoundError(Error): diff --git a/infrahub_sdk/generator.py b/infrahub_sdk/generator.py index 379548aa..f4e97dea 100644 --- a/infrahub_sdk/generator.py +++ b/infrahub_sdk/generator.py @@ -14,7 +14,7 @@ class InfrahubGenerator(InfrahubOperation): - """Infrahub Generator class""" + """Infrahub Generator class.""" def __init__( self, @@ -66,7 +66,7 @@ def client(self, value: InfrahubClient) -> None: self._client = value async def collect_data(self) -> dict: - """Query the result of the GraphQL Query defined in self.query and return the result""" + """Query the result of the GraphQL Query defined in self.query and return the result.""" data = await self._init_client.query_gql_query( name=self.query, branch_name=self.branch_name, @@ -92,7 +92,7 @@ async def run(self, identifier: str, data: dict | None = None) -> None: @abstractmethod async def generate(self, data: dict) -> None: - """Code to run the generator + """Code to run the generator. Any child class of the InfrahubGenerator us expected to provide this method. 
The method is expected to use the provided InfrahubClient contained in self.client to create or update any nodes in an idempotent diff --git a/infrahub_sdk/graphql/utils.py b/infrahub_sdk/graphql/utils.py index abd56d21..837a19e2 100644 --- a/infrahub_sdk/graphql/utils.py +++ b/infrahub_sdk/graphql/utils.py @@ -92,6 +92,7 @@ def strip_typename_from_fragment(fragment: FragmentDefinitionNode) -> FragmentDe def get_class_def_index(module: ast.Module) -> int: """Get the index of the first class definition in the module. + It's useful to insert other classes before the first class definition. """ for idx, item in enumerate(module.body): diff --git a/infrahub_sdk/node/attribute.py b/infrahub_sdk/node/attribute.py index 7bcdbfd3..c1a424a8 100644 --- a/infrahub_sdk/node/attribute.py +++ b/infrahub_sdk/node/attribute.py @@ -45,10 +45,12 @@ class Attribute: """Represents an attribute of a Node, including its schema, value, and properties.""" def __init__(self, name: str, schema: AttributeSchemaAPI, data: Any | dict) -> None: - """Args: - name (str): The name of the attribute. - schema (AttributeSchema): The schema defining the attribute. - data (Union[Any, dict]): The data for the attribute, either in raw form or as a dictionary. + """Initialize the attribute. + + Args: + name (str): The name of the attribute. + schema (AttributeSchema): The schema defining the attribute. + data (Union[Any, dict]): The data for the attribute, either in raw form or as a dictionary. """ self.name = name diff --git a/infrahub_sdk/node/metadata.py b/infrahub_sdk/node/metadata.py index 690fd291..8466c67b 100644 --- a/infrahub_sdk/node/metadata.py +++ b/infrahub_sdk/node/metadata.py @@ -7,8 +7,10 @@ class NodeMetadata: """Represents metadata about a node (created_at, created_by, updated_at, updated_by).""" def __init__(self, data: dict | None = None) -> None: - """Args: - data: Data containing the metadata fields from the GraphQL response. + """Initialize the node metadata. 
+ + Args: + data: Data containing the metadata fields from the GraphQL response. """ self.created_at: str | None = None @@ -45,8 +47,10 @@ class RelationshipMetadata: """Represents metadata about a relationship edge (updated_at, updated_by).""" def __init__(self, data: dict | None = None) -> None: - """Args: - data: Data containing the metadata fields from the GraphQL response. + """Initialize the relationship metadata. + + Args: + data: Data containing the metadata fields from the GraphQL response. """ self.updated_at: str | None = None diff --git a/infrahub_sdk/node/node.py b/infrahub_sdk/node/node.py index 6155fd2f..c39ac293 100644 --- a/infrahub_sdk/node/node.py +++ b/infrahub_sdk/node/node.py @@ -67,13 +67,15 @@ class UploadResult: class InfrahubNodeBase: - """Base class for InfrahubNode and InfrahubNodeSync""" + """Base class for InfrahubNode and InfrahubNodeSync.""" def __init__(self, schema: MainSchemaTypesAPI, branch: str, data: dict | None = None) -> None: - """Args: - schema: The schema of the node. - branch: The branch where the node resides. - data: Optional data to initialize the node. + """Initialize the base node. + + Args: + schema: The schema of the node. + branch: The branch where the node resides. + data: Optional data to initialize the node. """ self._schema = schema @@ -190,7 +192,7 @@ def _init_attributes(self, data: dict | None = None) -> None: ) def __setattr__(self, name: str, value: Any) -> None: - """Set values for attributes that exist or revert to normal behaviour""" + """Set values for attributes that exist or revert to normal behaviour.""" if "_attribute_data" in self.__dict__ and name in self._attribute_data: self._attribute_data[name].value = value return @@ -626,11 +628,13 @@ def __init__( branch: str | None = None, data: dict | None = None, ) -> None: - """Args: - client: The client used to interact with the backend. - schema: The schema of the node. - branch: The branch where the node resides. 
- data: Optional data to initialize the node. + """Initialize the async node. + + Args: + client: The client used to interact with the backend. + schema: The schema of the node. + branch: The branch where the node resides. + data: Optional data to initialize the node. """ self._client = client @@ -1599,11 +1603,13 @@ def __init__( branch: str | None = None, data: dict | None = None, ) -> None: - """Args: - client (InfrahubClientSync): The client used to interact with the backend synchronously. - schema (MainSchemaTypes): The schema of the node. - branch (Optional[str]): The branch where the node resides. - data (Optional[dict]): Optional data to initialize the node. + """Initialize the sync node. + + Args: + client (InfrahubClientSync): The client used to interact with the backend synchronously. + schema (MainSchemaTypes): The schema of the node. + branch (Optional[str]): The branch where the node resides. + data (Optional[dict]): Optional data to initialize the node. """ self._client = client diff --git a/infrahub_sdk/node/property.py b/infrahub_sdk/node/property.py index f3a01550..ef122d67 100644 --- a/infrahub_sdk/node/property.py +++ b/infrahub_sdk/node/property.py @@ -5,8 +5,10 @@ class NodeProperty: """Represents a property of a node, typically used for metadata like display labels.""" def __init__(self, data: dict | str) -> None: - """Args: - data (Union[dict, str]): Data representing the node property. + """Initialize the node property. + + Args: + data (Union[dict, str]): Data representing the node property. 
""" self.id = None diff --git a/infrahub_sdk/node/related_node.py b/infrahub_sdk/node/related_node.py index b586cd95..5ece4bf4 100644 --- a/infrahub_sdk/node/related_node.py +++ b/infrahub_sdk/node/related_node.py @@ -18,11 +18,13 @@ class RelatedNodeBase: """Base class for representing a related node in a relationship.""" def __init__(self, branch: str, schema: RelationshipSchemaAPI, data: Any | dict, name: str | None = None) -> None: - """Args: - branch (str): The branch where the related node resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Data representing the related node. - name (Optional[str]): The name of the related node. + """Initialize the base related node. + + Args: + branch (str): The branch where the related node resides. + schema (RelationshipSchema): The schema of the relationship. + data (Union[Any, dict]): Data representing the related node. + name (Optional[str]): The name of the related node. """ self.schema = schema @@ -222,12 +224,14 @@ def __init__( data: Any | dict, name: str | None = None, ) -> None: - """Args: - client (InfrahubClient): The client used to interact with the backend asynchronously. - branch (str): The branch where the related node resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Data representing the related node. - name (Optional[str]): The name of the related node. + """Initialize the async related node. + + Args: + client (InfrahubClient): The client used to interact with the backend asynchronously. + branch (str): The branch where the related node resides. + schema (RelationshipSchema): The schema of the relationship. + data (Union[Any, dict]): Data representing the related node. + name (Optional[str]): The name of the related node. 
""" self._client = client @@ -269,12 +273,14 @@ def __init__( data: Any | dict, name: str | None = None, ) -> None: - """Args: - client (InfrahubClientSync): The client used to interact with the backend synchronously. - branch (str): The branch where the related node resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Data representing the related node. - name (Optional[str]): The name of the related node. + """Initialize the sync related node. + + Args: + client (InfrahubClientSync): The client used to interact with the backend synchronously. + branch (str): The branch where the related node resides. + schema (RelationshipSchema): The schema of the relationship. + data (Union[Any, dict]): Data representing the related node. + name (Optional[str]): The name of the related node. """ self._client = client diff --git a/infrahub_sdk/node/relationship.py b/infrahub_sdk/node/relationship.py index ba5a8736..7457a73a 100644 --- a/infrahub_sdk/node/relationship.py +++ b/infrahub_sdk/node/relationship.py @@ -20,13 +20,15 @@ class RelationshipManagerBase: - """Base class for RelationshipManager and RelationshipManagerSync""" + """Base class for RelationshipManager and RelationshipManagerSync.""" def __init__(self, name: str, branch: str, schema: RelationshipSchemaAPI) -> None: - """Args: - name (str): The name of the relationship. - branch (str): The branch where the relationship resides. - schema (RelationshipSchema): The schema of the relationship. + """Initialize the base relationship manager. + + Args: + name (str): The name of the relationship. + branch (str): The branch where the relationship resides. + schema (RelationshipSchema): The schema of the relationship. """ self.initialized: bool = False @@ -125,13 +127,15 @@ def __init__( schema: RelationshipSchemaAPI, data: Any | dict, ) -> None: - """Args: - name (str): The name of the relationship. - client (InfrahubClient): The client used to interact with the backend. 
- node (InfrahubNode): The node to which the relationship belongs. - branch (str): The branch where the relationship resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Initial data for the relationships. + """Initialize the async relationship manager. + + Args: + name (str): The name of the relationship. + client (InfrahubClient): The client used to interact with the backend. + node (InfrahubNode): The node to which the relationship belongs. + branch (str): The branch where the relationship resides. + schema (RelationshipSchema): The schema of the relationship. + data (Union[Any, dict]): Initial data for the relationships. Raises: ValueError: If ``data`` is in an unexpected format. @@ -256,13 +260,15 @@ def __init__( schema: RelationshipSchemaAPI, data: Any | dict, ) -> None: - """Args: - name (str): The name of the relationship. - client (InfrahubClientSync): The client used to interact with the backend synchronously. - node (InfrahubNodeSync): The node to which the relationship belongs. - branch (str): The branch where the relationship resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Initial data for the relationships. + """Initialize the sync relationship manager. + + Args: + name (str): The name of the relationship. + client (InfrahubClientSync): The client used to interact with the backend synchronously. + node (InfrahubNodeSync): The node to which the relationship belongs. + branch (str): The branch where the relationship resides. + schema (RelationshipSchema): The schema of the relationship. + data (Union[Any, dict]): Initial data for the relationships. Raises: ValueError: If ``data`` is in an unexpected format. 
diff --git a/infrahub_sdk/operation.py b/infrahub_sdk/operation.py
index 54dca860..8ecd0173 100644
--- a/infrahub_sdk/operation.py
+++ b/infrahub_sdk/operation.py
@@ -44,17 +44,17 @@ def branch_name(self) -> str:
 
     @property
     def store(self) -> NodeStore:
-        """The store will be populated with nodes based on the query during the collection of data if activated"""
+        """The store will be populated with nodes based on the query during the collection of data if activated."""
         return self._init_client.store
 
     @property
     def nodes(self) -> list[InfrahubNode]:
-        """Returns nodes collected and parsed during the data collection process if this feature is enabled"""
+        """Returns nodes collected and parsed during the data collection process if this feature is enabled."""
         return self._nodes
 
     @property
     def related_nodes(self) -> list[InfrahubNode]:
-        """Returns nodes collected and parsed during the data collection process if this feature is enabled"""
+        """Returns nodes collected and parsed during the data collection process if this feature is enabled."""
         return self._related_nodes
 
     async def process_nodes(self, data: dict) -> None:
diff --git a/infrahub_sdk/protocols_generator/generator.py b/infrahub_sdk/protocols_generator/generator.py
index 16439838..0d4c6108 100644
--- a/infrahub_sdk/protocols_generator/generator.py
+++ b/infrahub_sdk/protocols_generator/generator.py
@@ -27,7 +27,7 @@ def load_template() -> str:
 
 
 def move_to_end_of_list(lst: list, item: str) -> list:
-    """Move an item to the end of a list if it exists in the list"""
+    """Move an item to the end of a list if it exists in the list."""
     if item in lst:
         lst.remove(item)
         lst.append(item)
@@ -93,7 +93,7 @@ def render(self, sync: bool = True) -> str:
 
     @staticmethod
     def _jinja2_filter_syncify(value: str | list, sync: bool = False) -> str | list:
-        """Filter to help with the convertion to sync
+        """Filter to help with the conversion to sync.
If a string is provided, append Sync to the end of the string If a list is provided, search for CoreNode and replace it with CoreNodeSync diff --git a/infrahub_sdk/pytest_plugin/items/base.py b/infrahub_sdk/pytest_plugin/items/base.py index bb7687d0..8049cc41 100644 --- a/infrahub_sdk/pytest_plugin/items/base.py +++ b/infrahub_sdk/pytest_plugin/items/base.py @@ -78,7 +78,7 @@ def reportinfo(self) -> tuple[Path | str, int | None, str]: @property def repository_base(self) -> str: - """Return the path to the root of the repository + """Return the path to the root of the repository. This will be an absolute path if --infrahub-config-path is an absolute path as happens when tests are started from within Infrahub server. diff --git a/infrahub_sdk/query_groups.py b/infrahub_sdk/query_groups.py index 0041ee6a..cc7faebd 100644 --- a/infrahub_sdk/query_groups.py +++ b/infrahub_sdk/query_groups.py @@ -14,7 +14,7 @@ class InfrahubGroupContextBase: - """Base class for InfrahubGroupContext and InfrahubGroupContextSync""" + """Base class for InfrahubGroupContext and InfrahubGroupContextSync.""" def __init__(self) -> None: self.related_node_ids: list[str] = [] @@ -52,7 +52,7 @@ def set_properties( self.branch = branch def _get_params_as_str(self) -> str: - """Convert the params in dict format, into a string""" + """Convert the params in dict format, into a string.""" params_as_str: list[str] = [] for key, value in self.params.items(): params_as_str.append(f"{key}: {value!s}") @@ -70,8 +70,9 @@ def _generate_group_name(self, suffix: str | None = None) -> str: return group_name def _generate_group_description(self, schema: MainSchemaTypesAPI) -> str: - """Generate the description of the group from the params - and ensure it's not longer than the maximum length of the description field. + """Generate the description of the group from the params. + + The result is truncated so it is not longer than the maximum length of the description field. 
""" if not self.params: return "" diff --git a/infrahub_sdk/recorder.py b/infrahub_sdk/recorder.py index e2038be7..1ca9deca 100644 --- a/infrahub_sdk/recorder.py +++ b/infrahub_sdk/recorder.py @@ -19,13 +19,13 @@ class RecorderType(str, enum.Enum): @runtime_checkable class Recorder(Protocol): def record(self, response: httpx.Response) -> None: - """Record the response from Infrahub""" + """Record the response from Infrahub.""" class NoRecorder: @staticmethod def record(response: httpx.Response) -> None: - """The NoRecorder just silently returns""" + """The NoRecorder just silently returns.""" @classmethod def default(cls) -> NoRecorder: diff --git a/infrahub_sdk/schema/__init__.py b/infrahub_sdk/schema/__init__.py index 8b58f98b..31b5444a 100644 --- a/infrahub_sdk/schema/__init__.py +++ b/infrahub_sdk/schema/__init__.py @@ -177,7 +177,7 @@ def validate_data_against_schema(self, schema: MainSchemaTypesAPI, data: dict) - ) def set_cache(self, schema: dict[str, Any] | SchemaRootAPI | BranchSchema, branch: str | None = None) -> None: - """Set the cache manually (primarily for unit testing) + """Set the cache manually (primarily for unit testing). 
Args: schema: The schema to set the cache as provided by the /api/schema endpoint either in dict or SchemaRootAPI format @@ -369,7 +369,7 @@ async def load( return self._validate_load_schema_response(response=response) async def wait_until_converged(self, branch: str | None = None) -> None: - """Wait until the schema has converged on the selected branch or the timeout has been reached""" + """Wait until the schema has converged on the selected branch or the timeout has been reached.""" waited = 0 while True: if await self.in_sync(branch=branch): @@ -384,7 +384,7 @@ async def wait_until_converged(self, branch: str | None = None) -> None: await asyncio.sleep(delay=1) async def in_sync(self, branch: str | None = None) -> bool: - """Indicate if the schema is in sync across all workers for the provided branch""" + """Indicate if the schema is in sync across all workers for the provided branch.""" response = await self.client.execute_graphql(query=SCHEMA_HASH_SYNC_STATUS, branch_name=branch) return response["InfrahubStatus"]["summary"]["schema_hash_synced"] @@ -899,7 +899,7 @@ def load( return self._validate_load_schema_response(response=response) def wait_until_converged(self, branch: str | None = None) -> None: - """Wait until the schema has converged on the selected branch or the timeout has been reached""" + """Wait until the schema has converged on the selected branch or the timeout has been reached.""" waited = 0 while True: if self.in_sync(branch=branch): @@ -914,7 +914,7 @@ def wait_until_converged(self, branch: str | None = None) -> None: sleep(1) def in_sync(self, branch: str | None = None) -> bool: - """Indicate if the schema is in sync across all workers for the provided branch""" + """Indicate if the schema is in sync across all workers for the provided branch.""" response = self.client.execute_graphql(query=SCHEMA_HASH_SYNC_STATUS, branch_name=branch) return response["InfrahubStatus"]["summary"]["schema_hash_synced"] diff --git a/infrahub_sdk/schema/main.py 
b/infrahub_sdk/schema/main.py index fd98e1e5..83bc69a8 100644 --- a/infrahub_sdk/schema/main.py +++ b/infrahub_sdk/schema/main.py @@ -294,7 +294,8 @@ def supports_artifact_definition(self) -> bool: @property def supports_artifacts(self) -> bool: - """Returns True if this schema supports artifact operations via CoreArtifactTarget inheritance. + """Return True if this schema supports artifact operations via CoreArtifactTarget inheritance. + Only NodeSchemaAPI overrides this; all other schema types return False by design because artifact capability is tied to node inheritance, not profiles, templates, or generics. """ @@ -302,7 +303,8 @@ def supports_artifacts(self) -> bool: @property def supports_file_object(self) -> bool: - """Returns True if this schema supports file object operations via CoreFileObject inheritance. + """Return True if this schema supports file object operations via CoreFileObject inheritance. + Only NodeSchemaAPI overrides this; all other schema types return False by design because file object capability is tied to node inheritance, not profiles, templates, or generics. """ @@ -315,7 +317,8 @@ def supports_hierarchy(self) -> bool: @property def hierarchical_relationship_schemas(self) -> list[RelationshipSchemaAPI]: - """Returns pseudo-schemas for parent/children/ancestors/descendants if hierarchy is set. + """Return pseudo-schemas for parent/children/ancestors/descendants if hierarchy is set. + Only NodeSchemaAPI overrides this; all other schema types return an empty list. 
""" return [] diff --git a/infrahub_sdk/spec/object.py b/infrahub_sdk/spec/object.py index cf59d898..d634442a 100644 --- a/infrahub_sdk/spec/object.py +++ b/infrahub_sdk/spec/object.py @@ -93,7 +93,7 @@ def peer_has_hfid(self) -> bool: @property def is_bidirectional(self) -> bool: - """Indicate if a relationship with the same identifier exists on the other side""" + """Indicate if a relationship with the same identifier exists on the other side.""" return bool(self.peer_rel) @property @@ -119,7 +119,7 @@ def is_reference(self) -> bool: return self.format in {RelationshipDataFormat.ONE_REF, RelationshipDataFormat.MANY_REF} def get_context(self, value: Any) -> dict: - """Return a dict to insert to the context if the relationship is mandatory""" + """Return a dict to insert to the context if the relationship is mandatory.""" if self.peer_rel and self.is_mandatory and self.peer_rel.cardinality == RelationshipCardinality.ONE: return {self.peer_rel.name: value} if self.peer_rel and self.is_mandatory and self.peer_rel.cardinality == RelationshipCardinality.MANY: @@ -129,7 +129,7 @@ def get_context(self, value: Any) -> dict: def find_matching_relationship( self, peer_schema: MainSchemaTypesAPI, force: bool = False ) -> RelationshipSchema | None: - """Find the matching relationship on the other side of the relationship""" + """Find the matching relationship on the other side of the relationship.""" if self.peer_rel and not force: return self.peer_rel @@ -209,7 +209,7 @@ class InfrahubObjectFileData(BaseModel): data: list[dict[str, Any]] = Field(default_factory=list) async def _get_processed_data(self, data: list[dict[str, Any]]) -> list[dict[str, Any]]: - """Get data processed according to the strategy""" + """Get data processed according to the strategy.""" return await DataProcessorFactory.process_data(kind=self.kind, parameters=self.parameters, data=data) async def validate_format(self, client: InfrahubClient, branch: str | None = None) -> list[ObjectValidationError]: 
diff --git a/infrahub_sdk/spec/processors/data_processor.py b/infrahub_sdk/spec/processors/data_processor.py index 0b007fec..bf034b54 100644 --- a/infrahub_sdk/spec/processors/data_processor.py +++ b/infrahub_sdk/spec/processors/data_processor.py @@ -3,8 +3,8 @@ class DataProcessor(ABC): - """Abstract base class for data processing strategies""" + """Abstract base class for data processing strategies.""" @abstractmethod async def process_data(self, data: list[dict[str, Any]]) -> list[dict[str, Any]]: - """Process the data according to the strategy""" + """Process the data according to the strategy.""" diff --git a/infrahub_sdk/spec/processors/factory.py b/infrahub_sdk/spec/processors/factory.py index 80f80efc..2a8455e8 100644 --- a/infrahub_sdk/spec/processors/factory.py +++ b/infrahub_sdk/spec/processors/factory.py @@ -9,7 +9,7 @@ class DataProcessorFactory: - """Factory to create appropriate data processor based on strategy""" + """Factory to create appropriate data processor based on strategy.""" @classmethod def get_processors(cls, kind: str, parameters: InfrahubObjectParameters) -> Sequence[DataProcessor]: diff --git a/infrahub_sdk/spec/processors/range_expand_processor.py b/infrahub_sdk/spec/processors/range_expand_processor.py index f0e4290c..b1d4f3f5 100644 --- a/infrahub_sdk/spec/processors/range_expand_processor.py +++ b/infrahub_sdk/spec/processors/range_expand_processor.py @@ -13,7 +13,7 @@ class RangeExpandDataProcessor(DataProcessor): - """Process data with range expansion""" + """Process data with range expansion.""" @classmethod async def process_data( diff --git a/infrahub_sdk/spec/range_expansion.py b/infrahub_sdk/spec/range_expansion.py index 4f231777..5ded0ae3 100644 --- a/infrahub_sdk/spec/range_expansion.py +++ b/infrahub_sdk/spec/range_expansion.py @@ -90,9 +90,9 @@ def _pairwise(lst: list[int]) -> list[tuple[int, int]]: def range_expansion(interface_pattern: str) -> list[str]: - """Expand string pattern into a list of strings, supporting 
both - number and single-character alphabet ranges. Heavily inspired by - Netutils interface_range_expansion but adapted to support letters. + """Expand a string pattern into a list of strings, supporting number and single-character alphabet ranges. + + Heavily inspired by Netutils interface_range_expansion but adapted to support letters. Args: interface_pattern: The string pattern that will be parsed to create the list of interfaces. diff --git a/infrahub_sdk/task/manager.py b/infrahub_sdk/task/manager.py index 1931f896..913c6b75 100644 --- a/infrahub_sdk/task/manager.py +++ b/infrahub_sdk/task/manager.py @@ -211,12 +211,12 @@ async def wait_for_completion(self, id: str, interval: int = 1, timeout: int = 6 interval: The interval to check the task state. Defaults to 1. timeout: The timeout to wait for the task to complete. Defaults to 60. - Raises: - TaskNotCompletedError: The task did not complete in the given timeout. - Returns: The task object. + Raises: + TaskNotCompletedError: The task did not complete in the given timeout. + """ for _ in range(timeout // interval): task = await self.get(id=id) @@ -451,12 +451,12 @@ def wait_for_completion(self, id: str, interval: int = 1, timeout: int = 60) -> interval: The interval to check the task state. Defaults to 1. timeout: The timeout to wait for the task to complete. Defaults to 60. - Raises: - TaskNotCompletedError: The task did not complete in the given timeout. - Returns: The task object. + Raises: + TaskNotCompletedError: The task did not complete in the given timeout. 
+ """ for _ in range(timeout // interval): task = self.get(id=id) diff --git a/infrahub_sdk/transforms.py b/infrahub_sdk/transforms.py index bd663d71..bf2d14bf 100644 --- a/infrahub_sdk/transforms.py +++ b/infrahub_sdk/transforms.py @@ -54,7 +54,7 @@ def transform(self, data: dict) -> Any: pass async def collect_data(self) -> dict: - """Query the result of the GraphQL Query defined in self.query and return the result""" + """Query the result of the GraphQL Query defined in self.query and return the result.""" return await self.client.query_gql_query(name=self.query, branch_name=self.branch_name) async def run(self, data: dict | None = None) -> Any: diff --git a/infrahub_sdk/types.py b/infrahub_sdk/types.py index 59cbedef..88d5445b 100644 --- a/infrahub_sdk/types.py +++ b/infrahub_sdk/types.py @@ -49,13 +49,13 @@ async def __call__( @runtime_checkable class InfrahubLogger(Protocol): def debug(self, event: str | None = None, *args: Any, **kw: Any) -> Any: - """Send a debug event""" + """Send a debug event.""" def info(self, event: str | None = None, *args: Any, **kw: Any) -> Any: - """Send an info event""" + """Send an info event.""" def warning(self, event: str | None = None, *args: Any, **kw: Any) -> Any: - """Send a warning event""" + """Send a warning event.""" def error(self, event: str | None = None, *args: Any, **kw: Any) -> Any: """Send an error event.""" diff --git a/infrahub_sdk/utils.py b/infrahub_sdk/utils.py index ef7c9bb6..95da7504 100644 --- a/infrahub_sdk/utils.py +++ b/infrahub_sdk/utils.py @@ -28,7 +28,7 @@ def generate_short_id() -> str: - """Generate a short unique ID""" + """Generate a short unique ID.""" return base64.urlsafe_b64encode(uuid.uuid4().bytes).rstrip(b"=").decode("ascii").lower() @@ -121,10 +121,13 @@ def intersection(list1: list[Any], list2: list[Any]) -> list: def compare_lists(list1: list[Any], list2: list[Any]) -> tuple[list[Any], list[Any], list[Any]]: - """Compare 2 lists and return : - - the intersection of both - - the item 
present only in list1 - - the item present only in list2 + """Compare 2 lists and return the intersection plus items present only in each list. + + Returns: + - the intersection of both + - the item present only in list1 + - the item present only in list2 + """ in_both = intersection(list1=list1, list2=list2) in_list_1 = list(set(list1) - set(in_both)) @@ -202,7 +205,7 @@ def str_to_bool(value: str | bool | int) -> bool: def generate_request_filename(request: httpx.Request) -> str: - """Return a filename for a request sent to the Infrahub API + """Return a filename for a request sent to the Infrahub API. This function is used when recording and playing back requests, as Infrahub is using a GraphQL API it's not possible to rely on the URL endpoint alone to separate one request from another, @@ -270,7 +273,7 @@ def calculate_dict_height(data: dict, cnt: int = 0) -> int: async def extract_fields(selection_set: SelectionSetNode | None) -> dict[str, dict] | None: - """This function extract all the requested fields in a tree of Dict from a SelectionSetNode + """This function extract all the requested fields in a tree of Dict from a SelectionSetNode. The goal of this function is to limit the fields that we need to query from the backend. diff --git a/infrahub_sdk/uuidt.py b/infrahub_sdk/uuidt.py index 2a0efdb4..7b708dc8 100644 --- a/infrahub_sdk/uuidt.py +++ b/infrahub_sdk/uuidt.py @@ -23,8 +23,9 @@ def generate_uuid() -> str: def encode_number(number: int, min_length: int) -> str: """Encode a number into a base16 string and ensure the result has a minimum size. - If the initial response produced doesn't match the min requirement, - random number will be used to fill the gap + + If the initial response produced doesn't match the min requirement, a random number is + used to fill the gap. 
""" response = base16encode(number=number).lower() if len(response) >= min_length: @@ -56,7 +57,7 @@ def __str__(self) -> str: return f"{timestamp_str[:8]}-{timestamp_str[8:12]}-{timestamp_str[-4:]}-{hostname_str[:4]}-{namespace_str[:4]}{self.random_chars[:8]}" def short(self) -> str: - """Return the last 8 digit of the UUID (the most random part)""" + """Return the last 8 digit of the UUID (the most random part).""" return str(self)[-8:] @classmethod diff --git a/pyproject.toml b/pyproject.toml index 654ced5e..04e0acc0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -349,14 +349,10 @@ ignore = [ "D104", # Missing docstring in public package "D105", # Missing docstring in magic method "D107", # Missing docstring in `__init__` - "D205", # Missing blank line after summary "D301", # Use `r"""` if any backslashes in a docstring - "D400", # First line should end with a period "D401", # First line of docstring should be in imperative mood "D404", # First word of the docstring should not be "This" - "D415", # First line should end with a period, question mark, or exclamation point "D417", # Missing argument description in the docstring - "D420", # Section name should end with a newline "DOC102", # Docstring contains extraneous parameter(s) "DOC201", # `return` is not documented in docstring "DOC402", # `yield` is not documented in docstring diff --git a/tasks.py b/tasks.py index ec91e909..8ff4cb38 100644 --- a/tasks.py +++ b/tasks.py @@ -411,7 +411,7 @@ def generate_python_sdk(context: Context) -> None: @task def generate_repository_jsonschema(context: Context) -> None: - """Generate JSON schema file for repository configuration. https://github.com/opsmill/infrahub-jsonschema""" + """Generate JSON schema file for repository configuration. 
https://github.com/opsmill/infrahub-jsonschema.""" from infrahub_sdk.schema.repository import InfrahubRepositoryConfig repository_jsonschema = MAIN_DIRECTORY_PATH / "generated" / "repository-config" / "develop.json" diff --git a/tests/unit/ctl/test_render_app.py b/tests/unit/ctl/test_render_app.py index 31ab463d..49300cc2 100644 --- a/tests/unit/ctl/test_render_app.py +++ b/tests/unit/ctl/test_render_app.py @@ -56,7 +56,7 @@ class RenderAppFailure: [pytest.param(tc, id=tc.name) for tc in RENDER_APP_FAIL_TEST_CASES], ) def test_validate_template_not_found(test_case: RenderAppFailure, httpx_mock: HTTPXMock) -> None: - """Ensure that the correct errors are caught""" + """Ensure that the correct errors are caught.""" httpx_mock.add_response( method="POST", url="http://mock/graphql/main", diff --git a/tests/unit/ctl/test_schema_app.py b/tests/unit/ctl/test_schema_app.py index b9fc52d5..8be46056 100644 --- a/tests/unit/ctl/test_schema_app.py +++ b/tests/unit/ctl/test_schema_app.py @@ -131,7 +131,7 @@ def test_schema_load_notvalid_namespace(httpx_mock: HTTPXMock) -> None: def test_load_valid_generic_schema(httpx_mock: HTTPXMock) -> None: - """A test which ensures that a generic schema is correctly loaded when loaded from infrahubctl command""" + """A test which ensures that a generic schema is correctly loaded when loaded from infrahubctl command.""" # Arrange fixture_file = get_fixtures_dir() / "models" / "valid_generic_schema.json" diff --git a/tests/unit/ctl/test_transform_app.py b/tests/unit/ctl/test_transform_app.py index b05be5c6..e036c23c 100644 --- a/tests/unit/ctl/test_transform_app.py +++ b/tests/unit/ctl/test_transform_app.py @@ -89,7 +89,7 @@ def test_transform_python_class_not_defined(tags_transform_dir: str) -> None: @staticmethod def test_gql_query_not_defined(tags_transform_dir: str) -> None: - """Case GraphQL Query is not defined""" + """Case GraphQL Query is not defined.""" # Remove GraphQL Query file gql_file = Path(Path(tags_transform_dir) / 
"tags_query.gql") Path.unlink(gql_file) @@ -102,7 +102,7 @@ def test_gql_query_not_defined(tags_transform_dir: str) -> None: @staticmethod def test_infrahubctl_transform_cmd_success(httpx_mock: HTTPXMock, tags_transform_dir: str) -> None: - """Case infrahubctl transform command executes successfully""" + """Case infrahubctl transform command executes successfully.""" httpx_mock.add_response( method="POST", url="http://mock/graphql/main", diff --git a/tests/unit/doc_generation/content_gen_methods/test_command_output_method.py b/tests/unit/doc_generation/content_gen_methods/test_command_output_method.py index 25d837e0..fe0d9bfb 100644 --- a/tests/unit/doc_generation/content_gen_methods/test_command_output_method.py +++ b/tests/unit/doc_generation/content_gen_methods/test_command_output_method.py @@ -18,8 +18,9 @@ def build(self) -> str: class TestCommandOutputDocContentGenMethod: def test_apply_runs_command_and_reads_output(self, tmp_path: Path) -> None: - """The method executes the command via context.run, then reads - the content from the temp file whose path was appended via --output. + """The method executes the command via context.run, then reads the output file. + + The content is read from the temp file whose path was appended via --output. """ output_content = "# Generated docs" diff --git a/tests/unit/doc_generation/content_gen_methods/test_jinja2_method.py b/tests/unit/doc_generation/content_gen_methods/test_jinja2_method.py index 55cd4e8b..9e1897cb 100644 --- a/tests/unit/doc_generation/content_gen_methods/test_jinja2_method.py +++ b/tests/unit/doc_generation/content_gen_methods/test_jinja2_method.py @@ -55,8 +55,9 @@ def test_apply_renders_with_multiple_variables(self, tmp_path: Path) -> None: assert result == "Hi there!" def test_auto_escaping_is_disabled(self, tmp_path: Path) -> None: - """HTML content in template variables must not be auto-escaped, - since the SDK Jinja2 environment does not enable autoescape. 
+ """HTML content in template variables must not be auto-escaped. + + The SDK Jinja2 environment does not enable autoescape. """ # Arrange template_file = tmp_path / "test.j2" diff --git a/tests/unit/sdk/conftest.py b/tests/unit/sdk/conftest.py index ad725532..f862374e 100644 --- a/tests/unit/sdk/conftest.py +++ b/tests/unit/sdk/conftest.py @@ -2079,7 +2079,7 @@ async def mock_query_infrahub_user(httpx_mock: HTTPXMock) -> HTTPXMock: @pytest.fixture def query_01() -> str: - """Simple query with one document""" + """Simple query with one document.""" return """ query { TestPerson { @@ -2151,7 +2151,7 @@ def query_02() -> str: @pytest.fixture def query_03() -> str: - """Advanced Query with 2 documents""" + """Advanced Query with 2 documents.""" return """ query FirstQuery { TestPerson { @@ -2190,7 +2190,7 @@ def query_03() -> str: @pytest.fixture def query_04() -> str: - """Simple query with variables""" + """Simple query with variables.""" return """ query ($person: String!){ TestPerson(name__value: $person) { @@ -2240,7 +2240,7 @@ def query_05() -> str: @pytest.fixture def query_06() -> str: - """Simple query with variables""" + """Simple query with variables.""" return """ query ( $str1: String, diff --git a/tests/unit/sdk/pool/test_attribute_from_pool.py b/tests/unit/sdk/pool/test_attribute_from_pool.py index 18ec619e..a11f2c63 100644 --- a/tests/unit/sdk/pool/test_attribute_from_pool.py +++ b/tests/unit/sdk/pool/test_attribute_from_pool.py @@ -1,4 +1,7 @@ -"""When using from_pool on a number attribute (e.g. vlan_id), the SDK should generate: +"""Tests that ``from_pool`` on a number attribute generates the expected GraphQL payload. + +When using from_pool on a number attribute (e.g. vlan_id), the SDK should generate:: + vlan_id: { from_pool: { id: "...", identifier: "..." 
} } There are two ways to request a pool allocation: diff --git a/tests/unit/sdk/test_client.py b/tests/unit/sdk/test_client.py index 7c8de9f3..ced021ca 100644 --- a/tests/unit/sdk/test_client.py +++ b/tests/unit/sdk/test_client.py @@ -691,7 +691,7 @@ async def test_query_echo(httpx_mock: HTTPXMock, echo_clients: BothClients, clie @pytest.mark.parametrize("client_type", client_types) async def test_clone(clients: BothClients, client_type: str) -> None: - """Validate that the configuration of a cloned client is a replica of the original client""" + """Validate that the configuration of a cloned client is a replica of the original client.""" if client_type == "standard": clone = clients.standard.clone() assert clone.config == clients.standard.config @@ -706,7 +706,7 @@ async def test_clone(clients: BothClients, client_type: str) -> None: @pytest.mark.parametrize("client_type", client_types) async def test_clone_define_branch(clients: BothClients, client_type: str) -> None: - """Validate that the clone branch parameter sets the correct branch of the cloned client""" + """Validate that the clone branch parameter sets the correct branch of the cloned client.""" clone_branch = "my_other_branch" if client_type == "standard": original_branch = clients.standard.default_branch diff --git a/tests/unit/sdk/test_config.py b/tests/unit/sdk/test_config.py index 0ba5ce6d..609e63e3 100644 --- a/tests/unit/sdk/test_config.py +++ b/tests/unit/sdk/test_config.py @@ -43,7 +43,7 @@ def test_config_address() -> None: def test_password_auth_overrides_env_token(monkeypatch: pytest.MonkeyPatch) -> None: - """Test that explicit username/password overrides INFRAHUB_API_TOKEN from environment""" + """Test that explicit username/password overrides INFRAHUB_API_TOKEN from environment.""" # Set environment variable for api_token monkeypatch.setenv("INFRAHUB_API_TOKEN", "token-from-env") @@ -58,7 +58,7 @@ def test_password_auth_overrides_env_token(monkeypatch: pytest.MonkeyPatch) -> N def 
test_token_auth_overrides_env_password(monkeypatch: pytest.MonkeyPatch) -> None: - """Test that explicit api_token overrides INFRAHUB_USERNAME and INFRAHUB_PASSWORD from environment""" + """Test that explicit api_token overrides INFRAHUB_USERNAME and INFRAHUB_PASSWORD from environment.""" # Set environment variables for username/password monkeypatch.setenv("INFRAHUB_USERNAME", "user-from-env") monkeypatch.setenv("INFRAHUB_PASSWORD", "pass-from-env") @@ -76,8 +76,10 @@ def test_token_auth_overrides_env_password(monkeypatch: pytest.MonkeyPatch) -> N def test_password_auth_overrides_env_token_when_password_env_var_and_username_explicit( monkeypatch: pytest.MonkeyPatch, ) -> None: - """Test that explicit username/password overrides INFRAHUB_API_TOKEN from environment when only username is provided - through Config object and password is provided through environment variable + """Test that explicit username/password overrides INFRAHUB_API_TOKEN from environment. + + The username is provided through the Config object and the password is provided through an + environment variable. 
""" # Set environment variable for api_token and password monkeypatch.setenv("INFRAHUB_API_TOKEN", "token-from-env") diff --git a/tests/unit/sdk/test_node.py b/tests/unit/sdk/test_node.py index ad3d77eb..ec69fa81 100644 --- a/tests/unit/sdk/test_node.py +++ b/tests/unit/sdk/test_node.py @@ -91,13 +91,13 @@ async def test_method_sanity() -> None: @pytest.mark.parametrize("value", SAFE_GRAPHQL_VALUES) def test_validate_graphql_value(value: str) -> None: - """All these values are safe and should not be converted""" + """All these values are safe and should not be converted.""" assert SAFE_VALUE.match(value) @pytest.mark.parametrize("value", UNSAFE_GRAPHQL_VALUES) def test_identify_unsafe_graphql_value(value: str) -> None: - """All these values are safe and should not be converted""" + """All these values are safe and should not be converted.""" assert not SAFE_VALUE.match(value) @@ -1391,7 +1391,7 @@ async def test_create_input_data(client: InfrahubClient, location_schema: NodeSc async def test_create_input_data_with_dropdown( client: InfrahubClient, location_schema_with_dropdown: NodeSchemaAPI, client_type: str ) -> None: - """Validate input data including dropdown field""" + """Validate input data including dropdown field.""" data = { "name": {"value": "JFK1"}, "description": {"value": "JFK Airport"}, @@ -1453,7 +1453,7 @@ async def test_update_input_data_existing_node_with_optional_relationship( async def test_create_input_data__with_relationships_02( client: InfrahubClient, location_schema: NodeSchemaAPI, client_type: str ) -> None: - """Validate input data with variables that needs replacements""" + """Validate input data with variables that needs replacements.""" data = { "name": {"value": "JFK1"}, "description": {"value": "JFK\n Airport"}, diff --git a/tests/unit/sdk/test_schema.py b/tests/unit/sdk/test_schema.py index 308fe64a..4b717366 100644 --- a/tests/unit/sdk/test_schema.py +++ b/tests/unit/sdk/test_schema.py @@ -64,7 +64,7 @@ async def 
test_fetch_schema(mock_schema_query_01: HTTPXMock, client_type: str) - @pytest.mark.parametrize("client_type", client_types) async def test_fetch_schema_conditional_refresh(mock_schema_query_01: HTTPXMock, client_type: str) -> None: - """Verify that only one schema request is sent if we request to update the schema but already have the correct hash""" + """Verify that only one schema request is sent if we request to update the schema but already have the correct hash.""" if client_type == "standard": client = InfrahubClient(config=Config(address="http://mock", insert_tracker=True)) nodes = await client.schema.all(branch="main") diff --git a/tests/unit/sdk/test_topological_sort.py b/tests/unit/sdk/test_topological_sort.py index 0028d24d..a9769d71 100644 --- a/tests/unit/sdk/test_topological_sort.py +++ b/tests/unit/sdk/test_topological_sort.py @@ -74,11 +74,15 @@ def test_topological_sort_disjoint_2() -> None: def test_topological_sort_binary_tree() -> None: - """A - b c - d e f g - hi j k - lm + """Sort a binary dependency tree with uneven depth on each side. + + The `dependencies` mapping below describes this tree:: + + a + b c + d e f g + h i j k + l m """ dependencies = { "a": ["b", "c"],