from collections import deque
from dataclasses import MISSING, dataclass, field, fields
from enum import Enum
from pathlib import Path
from typing import Any, Literal, TypedDict, cast

from jsonschema import ValidationError, validate
from sphinx.application import Sphinx
from sphinx.config import Config as _SphinxConfig

from sphinx_codelinks.source_discover.config import (
    CommentType,
    SourceDiscoverConfig,
    SourceDiscoverSectionConfigType,
)
from sphinx_codelinks.source_discover.source_discover import SourceDiscover

UNIX_NEWLINE = "\n"


COMMENT_MARKERS = {
    # @Support C and C++ style comments, IMPL_C_1, impl, [FE_C_SUPPORT, FE_CPP]
    CommentType.cpp: ["//", "/*"],
    # @Support Python style comments, IMPL_PY_1, impl, [FE_PY]
    CommentType.python: ["#"],
    CommentType.cs: ["//", "/*", "///"],
}
ESCAPE = "\\"


class CommentCategory(str, Enum):
    comment = "comment"
    docstring = "expression_statement"


class NeedIdRefsConfigType(TypedDict):
    markers: list[str]


@dataclass
class NeedIdRefsConfig:
    @classmethod
    def field_names(cls) -> set[str]:
        return {item.name for item in fields(cls)}

    markers: list[str] = field(
        default_factory=lambda: ["@need-ids:"],
        metadata={"schema": {"type": "array", "items": {"type": "string"}}},
    )
    """The markers to extract need ids from"""
    @classmethod
    def get_schema(cls, name: str) -> dict[str, Any] | None:  # type: ignore[explicit-any]
        _field = next(_field for _field in fields(cls) if _field.name == name)
        if _field.metadata and "schema" in _field.metadata:
            return cast(dict[str, Any], _field.metadata["schema"])  # type: ignore[explicit-any]
        return None

    def check_schema(self) -> list[str]:
        errors = []
        for _field_name in self.field_names():
            schema = self.get_schema(_field_name)
            value = getattr(self, _field_name)
            try:
                validate(instance=value, schema=schema)  # type: ignore[arg-type] # validate has no type
            except ValidationError as e:
                errors.append(
                    f"Schema validation error in field '{_field_name}': {e.message}"
                )
        return errors
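    # Illustrative usage sketch (not part of the original module; the marker
    # strings below are invented examples):
    #
    #     cfg = NeedIdRefsConfig(markers=["@need-ids:", "@refs:"])
    #     cfg.check_schema()  # -> [] when every field matches its schema
    #
    #     bad = NeedIdRefsConfig(markers="@need-ids:")  # a str, not a list[str]
    #     bad.check_schema()  # -> ["Schema validation error in field 'markers': ..."]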


class MarkedRstConfigType(TypedDict):
    start_sequence: str
    end_sequence: str


@dataclass
class MarkedRstConfig:
    @classmethod
    def field_names(cls) -> set[str]:
        return {item.name for item in fields(cls)}

    start_sequence: str = field(default="@rst", metadata={"schema": {"type": "string"}})
    """Character sequence to indicate the start of the rst text."""
    end_sequence: str = field(
        default="@endrst", metadata={"schema": {"type": "string"}}
    )
    """Character sequence to indicate the end of the rst text."""

    @classmethod
    def get_schema(cls, name: str) -> dict[str, Any] | None:  # type: ignore[explicit-any]
        _field = next(_field for _field in fields(cls) if _field.name == name)
        if _field.metadata and "schema" in _field.metadata:
            return cast(dict[str, Any], _field.metadata["schema"])  # type: ignore[explicit-any]
        return None

    def check_schema(self) -> list[str]:
        errors = []
        for _field_name in self.field_names():
            schema = self.get_schema(_field_name)
            value = getattr(self, _field_name)
            try:
                validate(instance=value, schema=schema)  # type: ignore[arg-type] # validate has no type
            except ValidationError as e:
                errors.append(
                    f"Schema validation error in field '{_field_name}': {e.message}"
                )
        return errors

    def check_sequence_mutually_exclusive(self) -> list[str]:
        errors = []
        if self.start_sequence == self.end_sequence:
            errors.append("start_sequence and end_sequence cannot be the same.")
        return errors

    def check_fields_configuration(self) -> list[str]:
        return self.check_schema() + self.check_sequence_mutually_exclusive()
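    # Illustrative usage sketch (not part of the original module; the sequences
    # below mirror the defaults):
    #
    #     rst_cfg = MarkedRstConfig(start_sequence="@rst", end_sequence="@endrst")
    #     rst_cfg.check_fields_configuration()  # -> []
    #
    #     same = MarkedRstConfig(start_sequence="@rst", end_sequence="@rst")
    #     same.check_fields_configuration()
    #     # -> ["start_sequence and end_sequence cannot be the same."]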


class FieldConfig(TypedDict, total=False):
    name: str
    type: Literal["str", "list[str]"]
    default: str | list[str] | None


class OneLineCommentStyleType(TypedDict):
    start_sequence: str
    end_sequence: str
    field_split_char: str
    needs_fields: list[FieldConfig]


@dataclass
class OneLineCommentStyle:
    def __setattr__(self, name: str, value: Any) -> None:  # type: ignore[explicit-any]
        if name == "needs_fields":
            # apply default to fields
            self.apply_needs_field_default(value)
        return super().__setattr__(name, value)

    @classmethod
    def field_names(cls) -> set[str]:
        return {item.name for item in fields(cls)}

    start_sequence: str = field(default="@", metadata={"schema": {"type": "string"}})
    """Character sequence to indicate the start of the one-line comment."""

    end_sequence: str = field(
        default=UNIX_NEWLINE, metadata={"schema": {"type": "string"}}
    )
    """Character sequence to indicate the end of the one-line comment."""

    field_split_char: str = field(default=",", metadata={"schema": {"type": "string"}})
    """Character used to split the fields."""

    needs_fields: list[FieldConfig] = field(
        default_factory=lambda: [
            {"name": "title"},
            {"name": "id"},
            {"name": "type", "default": "impl"},
            {"name": "links", "type": "list[str]", "default": []},
        ],
        metadata={
            "required_fields": ["title", "type"],
            "field_default": {
                "type": "str",
            },
            "schema": {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "name": {"type": "string"},
                        "type": {
                            "type": "string",
                            "enum": ["str", "list[str]"],
                            "default": "str",
                        },
                        "default": {
                            "anyOf": [
                                {"type": "string"},
                                {"type": "array", "items": {"type": "string"}},
                            ]
                        },
                    },
                    "required": ["name"],
                    "additionalProperties": False,
                    "allOf": [
                        {
                            "if": {"properties": {"type": {"const": "list[str]"}}},
                            "then": {
                                "properties": {
                                    "default": {
                                        "type": "array",
                                        "items": {"type": "string"},
                                    }
                                }
                            },
                        },
                        {
                            "if": {"properties": {"type": {"const": "str"}}},
                            "then": {"properties": {"default": {"type": "string"}}},
                        },
                    ],
                },
            },
        },
    )

    @classmethod
    def apply_needs_field_default(cls, given_fields: list[FieldConfig]) -> None:
        field_default = next(
            _field.metadata["field_default"]
            for _field in fields(cls)
            if _field.name == "needs_fields"
        )

        for _field in given_fields:
            for _default in field_default:
                if _default not in _field:
                    _field[_default] = field_default[_default]  # type: ignore[literal-required] # dynamically assign keys
    @classmethod
    def get_required_fields(cls, name: str) -> list[str] | None:
        _field = next(_field for _field in fields(cls) if _field.name == name)
        if _field.metadata:
            return cast(list[str], _field.metadata["required_fields"])
        return None

    @classmethod
    def get_schema(cls, name: str) -> dict[str, Any] | None:  # type: ignore[explicit-any]
        _field = next(_field for _field in fields(cls) if _field.name == name)
        if _field.metadata and "schema" in _field.metadata:
            return cast(dict[str, Any], _field.metadata["schema"])  # type: ignore[explicit-any]
        return None

    def check_schema(self) -> list[str]:
        errors = []
        for _field_name in self.field_names():
            schema = self.get_schema(_field_name)
            value = getattr(self, _field_name)
            try:
                validate(instance=value, schema=schema)  # type: ignore[arg-type] # validate has no type specified
            except ValidationError as e:
                if _field_name == "needs_fields":
                    need_field_name = value[e.path[0]]["name"]
                    errors.append(
                        f"Schema validation error in need_fields '{need_field_name}': {e.message}"
                    )
                else:
                    errors.append(
                        f"Schema validation error in field '{_field_name}': {e.message}"
                    )
        return errors

    def check_required_fields(self) -> list[str]:
        errors = []
        required_fields = self.get_required_fields("needs_fields")
        if required_fields is None:
            errors.append("No required fields specified.")
            return errors
        given_field_names = [_field["name"] for _field in self.needs_fields]
        missing_fields = set(required_fields) - set(given_field_names)
        if len(missing_fields) != 0:
            errors.append(f"Missing required fields: {sorted(missing_fields)}")

        return errors

    def check_fields_mutually_exclusive(self) -> list[str]:
        errors = []
        needs_field_names = set()
        for _field in self.needs_fields:
            if _field["name"] in needs_field_names:
                errors.append(f"Field '{_field['name']}' is defined multiple times.")
            needs_field_names.add(_field["name"])
        return errors

    def check_fields_configuration(self) -> list[str]:
        return (
            self.check_schema()
            + self.check_required_fields()
            + self.check_fields_mutually_exclusive()
        )

    def get_cnt_required_fields(self) -> int:
        cnt_required_fields = 0
        for _field in self.needs_fields:
            if _field.get("default") is None:
                cnt_required_fields += 1
        return cnt_required_fields

    def get_pos_list_str(self) -> list[int]:
        pos_list_str = []
        for idx, _field in enumerate(self.needs_fields):
            if _field["type"] == "list[str]":
                pos_list_str.append(idx + 1)
        return pos_list_str
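    # Illustrative usage sketch (not part of the original module). With the
    # defaults, a one-line need in a source comment follows the pattern used by
    # the "@Support ..." comments in COMMENT_MARKERS above:
    # "@<title>, <id>, <type>, [<links>]".
    #
    #     style = OneLineCommentStyle()
    #     style.check_fields_configuration()  # -> []
    #     style.get_cnt_required_fields()  # -> 2 ("title" and "id" have no default)
    #     style.get_pos_list_str()  # -> [4] (the "links" field is a list[str])
    #
    # Defaults from "field_default" are filled in whenever needs_fields is set:
    #
    #     style.needs_fields = [{"name": "title"}]
    #     style.needs_fields[0]  # -> {"name": "title", "type": "str"}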


class AnalyseSectionConfigType(TypedDict, total=False):
    """Define typing for loading the `analyse` section from the configuration file."""

    get_need_id_refs: bool
    get_oneline_needs: bool
    get_rst: bool
    outdir: str
    git_root: str
    need_id_refs: NeedIdRefsConfigType
    marked_rst: MarkedRstConfigType
    oneline_comment_style: OneLineCommentStyleType


class SourceAnalyseConfigType(TypedDict, total=False):
    """Define typing for the source-analyse API configuration."""

    src_files: list[Path]
    src_dir: Path
    comment_type: CommentType
    get_need_id_refs: bool
    get_oneline_needs: bool
    get_rst: bool
    git_root: Path | None
    need_id_refs_config: NeedIdRefsConfig
    marked_rst_config: MarkedRstConfig
    oneline_comment_style: OneLineCommentStyle


class ProjectsAnalyseConfigType(TypedDict, total=False):
    projects_config: dict[str, SourceAnalyseConfigType]


@dataclass
class SourceAnalyseConfig:
    @classmethod
    def field_names(cls) -> set[str]:
        return {item.name for item in fields(cls)}

    src_files: list[Path] = field(
        default_factory=list,
        metadata={"schema": {"type": "array", "items": {"type": "string"}}},
    )
    """A list of source files to be processed."""
    src_dir: Path = field(
        default_factory=lambda: Path("./"), metadata={"schema": {"type": "string"}}
    )

    comment_type: CommentType = field(
        default=CommentType.cpp, metadata={"schema": {"type": "string"}}
    )
    """The type of comment to be processed."""

    get_need_id_refs: bool = field(
        default=True, metadata={"schema": {"type": "boolean"}}
    )
    """Whether to extract need id references from comments"""

    get_oneline_needs: bool = field(
        default=False, metadata={"schema": {"type": "boolean"}}
    )
    """Whether to extract oneline needs from comments"""

    get_rst: bool = field(default=False, metadata={"schema": {"type": "boolean"}})
    """Whether to extract rst texts from comments"""

    git_root: Path | None = field(
        default=None, metadata={"schema": {"type": ["string", "null"]}}
    )
    """Explicit path to the Git repository root. If not set, it will be auto-detected
    by traversing parent directories. Useful for Bazel builds or deeply nested configs."""

    need_id_refs_config: NeedIdRefsConfig = field(default_factory=NeedIdRefsConfig)
    """Configuration for extracting need id references from comments."""

    marked_rst_config: MarkedRstConfig = field(default_factory=MarkedRstConfig)
    """Configuration for extracting rst texts from comments."""

    oneline_comment_style: OneLineCommentStyle = field(
        default_factory=OneLineCommentStyle
    )
    """Configuration for extracting oneline needs from comments."""

    @classmethod
    def get_schema(cls, name: str) -> dict[str, Any] | None:  # type: ignore[explicit-any]
        _field = next(_field for _field in fields(cls) if _field.name == name)
        if _field.metadata and "schema" in _field.metadata:
            return cast(dict[str, Any], _field.metadata["schema"])  # type: ignore[explicit-any]
        return None

    def check_schema(self) -> list[str]:
        errors = []
        for _field_name in self.field_names():
            schema = self.get_schema(_field_name)
            if not schema:
                continue
            value = getattr(self, _field_name)
            if isinstance(value, Path):  # adapt to json schema restriction
                value = str(value)
            if _field_name == "src_files" and isinstance(
                value, list
            ):  # adapt to json schema restriction
                value: list[str] = [str(src_file) for src_file in value]  # type: ignore[no-redef] # only for value adaptation
            try:
                validate(instance=value, schema=schema)
            except ValidationError as e:
                errors.append(
                    f"Schema validation error in field '{_field_name}': {e.message}"
                )
        return errors

    def check_markers_mutually_exclusive(self) -> list[str]:
        errors = set()
        markers = set()
        markers.add(self.oneline_comment_style.start_sequence)
        markers.add(self.oneline_comment_style.end_sequence)
        if self.marked_rst_config.start_sequence in markers:
            errors.add(
                f"Marker {self.marked_rst_config.start_sequence} is defined multiple times"
            )
        else:
            markers.add(self.marked_rst_config.start_sequence)
        if self.marked_rst_config.end_sequence in markers:
            errors.add(
                f"Marker {self.marked_rst_config.end_sequence} is defined multiple times"
            )
        else:
            markers.add(self.marked_rst_config.end_sequence)

        for marker in self.need_id_refs_config.markers:
            if marker in markers:
                errors.add(f"Marker {marker} is defined multiple times")
            else:
                markers.add(marker)
        return list(errors)

    def check_fields_configuration(self) -> list[str]:
        errors: deque[str] = deque()
        if self.get_need_id_refs:
            need_id_refs_errors = self.need_id_refs_config.check_schema()
            if need_id_refs_errors:
                errors.appendleft("NeedIdRefs configuration errors:")
                errors.extend(need_id_refs_errors)
        if self.get_oneline_needs:
            oneline_needs_errors = (
                self.oneline_comment_style.check_fields_configuration()
            )
            if oneline_needs_errors:
                errors.appendleft("OneLineCommentStyle configuration errors:")
                errors.extend(oneline_needs_errors)
        if self.get_rst:
            marked_rst_errors = self.marked_rst_config.check_fields_configuration()
            if marked_rst_errors:
                errors.appendleft("MarkedRst configuration errors:")
                errors.extend(marked_rst_errors)
        analyse_errors = self.check_markers_mutually_exclusive() + self.check_schema()
        if analyse_errors:
            errors.appendleft("analyse configuration errors:")
            errors.extend(analyse_errors)
        return list(errors)
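    # Illustrative usage sketch (not part of the original module; the paths and
    # the colliding marker are invented). check_markers_mutually_exclusive()
    # reports markers shared between the sub-configurations:
    #
    #     analyse = SourceAnalyseConfig(
    #         src_files=[Path("src/main.cpp")],
    #         src_dir=Path("src"),
    #         get_rst=True,
    #         marked_rst_config=MarkedRstConfig(start_sequence="@", end_sequence="@endrst"),
    #     )
    #     analyse.check_markers_mutually_exclusive()
    #     # -> ["Marker @ is defined multiple times"]  (clashes with the one-line "@")
    #     analyse.check_fields_configuration()[0]
    #     # -> "analyse configuration errors:"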


SRC_TRACE_CACHE: str = "src_trace_cache"


class SourceTracingLineHref:
    """Global class for the mapping between source file line numbers and Sphinx documentation links."""

    def __init__(self) -> None:
        self.mappings: dict[str, dict[int, str]] = {}


file_lineno_href = SourceTracingLineHref()
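# Illustrative shape of the shared mapping (invented values; the actual keys and
# href strings are produced elsewhere in the extension):
#
#     file_lineno_href.mappings["src/main.cpp"] = {42: "docs/impl.html#impl_c_1"}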


class CodeLinksProjectConfigType(TypedDict, total=False):
    """TypedDict defining the configuration structure for individual SrcTrace projects.

    Contains both user-provided configuration:

    - source_discover
    - remote_url_pattern
    - analyse

    and runtime-generated configuration objects:

    - source_discover_config
    - analyse_config
    """

    source_discover: SourceDiscoverSectionConfigType
    remote_url_pattern: str
    analyse: AnalyseSectionConfigType
    source_discover_config: SourceDiscoverConfig
    analyse_config: SourceAnalyseConfig
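    # Illustrative sketch of a user-provided project entry (the concrete values,
    # including the remote_url_pattern placeholders, are invented, and the
    # source_discover keys are assumed from SourceDiscoverSectionConfigType):
    #
    #     project: CodeLinksProjectConfigType = {
    #         "source_discover": {"src_dir": "src", "comment_type": "cpp"},
    #         "remote_url_pattern": "https://example.com/repo/blob/main/{path}#L{line}",
    #         "analyse": {"get_oneline_needs": True},
    #     }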


class CodeLinksConfigType(TypedDict):
    config_from_toml: str | None
    set_local_url: bool
    local_url_field: str
    set_remote_url: bool
    remote_url_field: str
    outdir: Path
    projects: dict[str, CodeLinksProjectConfigType]
    debug_measurement: bool
    debug_filters: bool


@dataclass
class CodeLinksConfig:
    @classmethod
    def from_sphinx(cls, sphinx_config: _SphinxConfig) -> "CodeLinksConfig":
        obj = cls()
        super().__setattr__(obj, "_sphinx_config", sphinx_config)
        return obj

    def __getattribute__(self, name: str) -> Any:  # type: ignore[explicit-any]
        if name.startswith("__") or name == "_sphinx_config":
            return super().__getattribute__(name)
        sphinx_config = (
            object.__getattribute__(self, "_sphinx_config")
            if "_sphinx_config" in self.__dict__
            else None
        )
        if sphinx_config:
            return getattr(
                super().__getattribute__("_sphinx_config"), f"src_trace_{name}"
            )

        return object.__getattribute__(self, name)

    def __setattr__(self, name: str, value: Any) -> None:  # type: ignore[explicit-any]
        if name == "_sphinx_config" and "src_trace_projects" in value:
            src_trace_projects: dict[str, CodeLinksProjectConfigType] = value[
                "src_trace_projects"
            ]
            generate_project_configs(src_trace_projects)

        if name.startswith("__") or name == "_sphinx_config":
            return super().__setattr__(name, value)

        sphinx_config = (
            object.__getattribute__(self, "_sphinx_config")
            if "_sphinx_config" in self.__dict__
            else None
        )

        if sphinx_config:
            setattr(
                super().__getattribute__("_sphinx_config"), f"src_trace_{name}", value
            )

        if name == "outdir" and isinstance(value, str):
            # Ensure outdir is a Path object
            value = Path(value)
        return object.__setattr__(self, name, value)

    @classmethod
    def add_config_values(cls, app: Sphinx) -> None:
        """Add all config values to the Sphinx application."""
        for item in fields(cls):
            if item.default_factory is not MISSING:
                default = item.default_factory()
            elif item.default is not MISSING:
                default = item.default
            else:
                raise Exception(f"Field {item.name} has no default value or factory")

            name = item.name
            app.add_config_value(
                f"src_trace_{name}",
                default,
                item.metadata["rebuild"],
                types=item.metadata["types"],
            )

    @classmethod
    def field_names(cls) -> set[str]:
        return {item.name for item in fields(cls)}

    @classmethod
    def get_schema(cls, name: str) -> dict[str, Any] | None:  # type: ignore[explicit-any]
        """Get the schema for a config item."""
        _field = next(_field for _field in fields(cls) if _field.name == name)
        if _field.metadata and "schema" in _field.metadata:
            return _field.metadata["schema"]  # type: ignore[no-any-return]
        return None

    config_from_toml: str | None = field(
        default=None,
        metadata={
            "rebuild": "env",
            "types": (str, type(None)),
            "schema": {
                "type": ["string", "null"],
                "examples": ["config.toml", None],
            },
        },
    )
    """Path to a TOML file to load configuration from."""

    set_local_url: bool = field(
        default=False,
        metadata={
            "rebuild": "env",
            "types": (bool,),
            "schema": {
                "type": "boolean",
            },
        },
    )
    """Set the file URL in the extracted need."""

    local_url_field: str = field(
        default="local-url",
        metadata={
            "rebuild": "env",
            "types": (str,),
            "schema": {
                "type": "string",
            },
        },
    )
    """The field name for the file URL in the extracted need."""

    set_remote_url: bool = field(
        default=False,
        metadata={
            "rebuild": "env",
            "types": (bool,),
            "schema": {
                "type": "boolean",
            },
        },
    )
    """Set the remote URL in the extracted need."""

    remote_url_field: str = field(
        default="remote-url",
        metadata={
            "rebuild": "env",
            "types": (str,),
            "schema": {
                "type": "string",
            },
        },
    )
    """The field name for the remote URL in the extracted need."""
    outdir: Path = field(
        default=Path("output"),
        metadata={"rebuild": "env", "types": (str,), "schema": {"type": "string"}},
    )
    """The directory where the generated artifacts and their caches will be stored."""

    projects: dict[str, CodeLinksProjectConfigType] = field(
        default_factory=dict,
        metadata={
            "rebuild": "env",
            "types": (),
            "schema": {
                "type": "object",
                "additionalProperties": {
                    "type": "object",
                    "properties": {
                        "source_discover": {},
                        "analyse": {},
                        "remote_url_pattern": {},
                        "source_discover_config": {},
                        "analyse_config": {},
                    },
                    "additionalProperties": False,
                },
            },
        },
    )
    """The configuration for the source tracing projects."""

    debug_measurement: bool = field(
        default=False, metadata={"rebuild": "html", "types": (bool,)}
    )
    """If True, log runtime information for various functions."""
    debug_filters: bool = field(
        default=False, metadata={"rebuild": "html", "types": (bool,)}
    )
    """If True, log filter processing runtime information."""
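    # Illustrative sketch of how these fields surface in a Sphinx conf.py: each
    # dataclass field is registered as "src_trace_<name>" by add_config_values().
    # The project values below are invented:
    #
    #     src_trace_set_local_url = True
    #     src_trace_outdir = "output"
    #     src_trace_projects = {
    #         "demo": {
    #             "source_discover": {"src_dir": "src", "comment_type": "cpp"},
    #             "analyse": {"get_oneline_needs": True},
    #         }
    #     }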


def check_schema(config: CodeLinksConfig) -> list[str]:
    """Check only the first layer of the schema; nested dicts are not validated here."""
    errors = []
    for _field_name in CodeLinksConfig.field_names():
        schema = CodeLinksConfig.get_schema(_field_name)
        if not schema:
            continue
        value = getattr(config, _field_name)
        if isinstance(value, Path):  # adapt to json schema restriction
            value = str(value)
        try:
            validate(instance=value, schema=schema)
        except ValidationError as e:
            errors.append(
                f"Schema validation error in field '{_field_name}': {e.message}"
            )
    return errors


def check_project_configuration(config: CodeLinksConfig) -> list[str]:
    """Check nested project configurations"""
    errors = []

    for project_name, project_config in config.projects.items():
        project_errors: list[str] = []

        # validate source_discover config
        src_discover_config: SourceDiscoverConfig | None = project_config.get(
            "source_discover_config"
        )
        src_discover_errors = []
        if src_discover_config:
            src_discover_errors.extend(src_discover_config.check_schema())

        # validate analyse config
        analyse_config: SourceAnalyseConfig | None = project_config.get(
            "analyse_config"
        )
        analyse_errors = []
        if analyse_config:
            analyse_errors = analyse_config.check_fields_configuration()

        # validate src-trace config
        if config.set_remote_url and "remote_url_pattern" not in project_config:
            project_errors.append(
                "remote_url_pattern must be given, as set_remote_url is enabled"
            )

        if "remote_url_pattern" in project_config and not isinstance(
            project_config["remote_url_pattern"], str
        ):
            project_errors.append("remote_url_pattern must be a string")

        if analyse_errors or src_discover_errors or project_errors:
            errors.append(f"Project '{project_name}' has the following errors:")
            errors.extend(analyse_errors)
            errors.extend(src_discover_errors)
            errors.extend(project_errors)

    return errors


def check_configuration(config: CodeLinksConfig) -> list[str]:
    errors = []
    errors.extend(check_schema(config))
    errors.extend(check_project_configuration(config))
    return errors


def convert_src_discovery_config(
    config_dict: SourceDiscoverSectionConfigType | None,
) -> SourceDiscoverConfig:
    if config_dict:
        src_discover_dict = {
            key: (Path(value) if key == "src_dir" and isinstance(value, str) else value)
            for key, value in config_dict.items()
        }
        src_discover_config = SourceDiscoverConfig(**src_discover_dict)  # type: ignore[arg-type] # mypy is confused by dynamic assignment
    else:
        src_discover_config = SourceDiscoverConfig()

    return src_discover_config


def convert_analyse_config(
    config_dict: AnalyseSectionConfigType | None,
    src_discover: SourceDiscover | None = None,
) -> SourceAnalyseConfig:
    analyse_config_dict: SourceAnalyseConfigType = {}
    if config_dict:
        for k, v in config_dict.items():
            if k not in {"oneline_comment_style", "need_id_refs", "marked_rst"}:
                # Convert string paths to Path objects
                if k in {"src_dir", "git_root"} and isinstance(v, str):
                    analyse_config_dict[k] = Path(v)  # type: ignore[literal-required]
                else:
                    analyse_config_dict[k] = v  # type: ignore[literal-required] # dynamical assignment

        # Get oneline_comment_style configuration
        oneline_comment_style_dict: OneLineCommentStyleType | None = config_dict.get(
            "oneline_comment_style"
        )
        oneline_comment_style: OneLineCommentStyle = (
            convert_oneline_comment_style_config(oneline_comment_style_dict)
        )

        # Get need_id_refs configuration
        need_id_refs_config_dict: NeedIdRefsConfigType | None = config_dict.get(
            "need_id_refs"
        )
        need_id_refs_config = convert_need_id_refs_config(need_id_refs_config_dict)

        # Get marked_rst configuration
        marked_rst_config_dict: MarkedRstConfigType | None = config_dict.get(
            "marked_rst"
        )
        marked_rst_config = convert_marked_rst_config(marked_rst_config_dict)

        analyse_config_dict["need_id_refs_config"] = need_id_refs_config
        analyse_config_dict["marked_rst_config"] = marked_rst_config
        analyse_config_dict["oneline_comment_style"] = oneline_comment_style

    if src_discover:
        analyse_config_dict["src_files"] = src_discover.source_paths
        analyse_config_dict["src_dir"] = src_discover.src_discover_config.src_dir
        try:
            analyse_config_dict["comment_type"] = CommentType(
                src_discover.src_discover_config.comment_type
            )
        except ValueError:
            # If invalid comment_type, keep the string value
            # Validation will catch this error later
            comment_type_str: str = src_discover.src_discover_config.comment_type
            analyse_config_dict["comment_type"] = comment_type_str  # type: ignore[typeddict-item]

    return SourceAnalyseConfig(**analyse_config_dict)
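# Illustrative usage sketch (not part of the original module; the section values
# are invented): converting an "analyse" section dict into a SourceAnalyseConfig
# with nested config objects.
#
#     section: AnalyseSectionConfigType = {
#         "get_rst": True,
#         "marked_rst": {"start_sequence": "@rst", "end_sequence": "@endrst"},
#         "need_id_refs": {"markers": ["@need-ids:"]},
#     }
#     cfg = convert_analyse_config(section)
#     cfg.get_rst  # -> True
#     cfg.marked_rst_config.end_sequence  # -> "@endrst"
#     cfg.need_id_refs_config.markers  # -> ["@need-ids:"]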


def convert_oneline_comment_style_config(
    config_dict: OneLineCommentStyleType | None,
) -> OneLineCommentStyle:
    if config_dict is None:
        oneline_comment_style = OneLineCommentStyle()
    else:
        try:
            oneline_comment_style = OneLineCommentStyle(**config_dict)
        except TypeError as e:
            raise TypeError(f"Invalid oneline comment style configuration: {e}") from e
    return oneline_comment_style


def convert_need_id_refs_config(
    config_dict: NeedIdRefsConfigType | None,
) -> NeedIdRefsConfig:
    if not config_dict:
        need_id_refs_config = NeedIdRefsConfig()
    else:
        try:
            need_id_refs_config = NeedIdRefsConfig(**config_dict)
        except TypeError as e:
            raise TypeError(f"Invalid need id refs configuration: {e}") from e
    return need_id_refs_config


def convert_marked_rst_config(
    config_dict: MarkedRstConfigType | None,
) -> MarkedRstConfig:
    if not config_dict:
        marked_rst_config = MarkedRstConfig()
    else:
        try:
            marked_rst_config = MarkedRstConfig(**config_dict)
        except TypeError as e:
            raise TypeError(f"Invalid marked rst configuration: {e}") from e
    return marked_rst_config


def generate_project_configs(
    project_configs: dict[str, CodeLinksProjectConfigType],
) -> None:
    """Generate configs of source discover and analyse from their classes dynamically."""
    for project_config in project_configs.values():
        # overwrite the config entries with differently typed objects on purpose:
        # convert the raw dicts into their own config classes
        src_discover_section: SourceDiscoverSectionConfigType | None = cast(
            SourceDiscoverSectionConfigType,
            project_config.get("source_discover"),
        )
        source_discover_config = convert_src_discovery_config(src_discover_section)
        project_config["source_discover_config"] = source_discover_config

        analyse_section_config: AnalyseSectionConfigType | None = cast(
            AnalyseSectionConfigType, project_config.get("analyse")
        )
        analyse_config = convert_analyse_config(analyse_section_config)
        analyse_config.get_oneline_needs = True  # force extraction of oneline needs
        # Copy comment_type from source_discover_config to analyse_config
        try:
            analyse_config.comment_type = CommentType(
                source_discover_config.comment_type
            )
        except ValueError:
            # If invalid comment_type, keep the string value
            # Validation will catch this error later
            analyse_config.comment_type = source_discover_config.comment_type  # type: ignore[assignment]
        project_config["analyse_config"] = analyse_config
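# Illustrative end-to-end sketch (not part of the original module; the project
# values are invented and the source_discover keys are assumed): the function
# mutates each project entry in place, attaching the generated *_config objects.
#
#     projects: dict[str, CodeLinksProjectConfigType] = {
#         "demo": {"source_discover": {"src_dir": "src", "comment_type": "cpp"}},
#     }
#     generate_project_configs(projects)
#     projects["demo"]["source_discover_config"]  # -> SourceDiscoverConfig(...)
#     projects["demo"]["analyse_config"].get_oneline_needs  # -> True (forced above)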