mirror of https://github.com/mandiant/capa.git, synced 2025-12-22 23:26:21 -08:00
Merge pull request #1679 from Aayush-Goel-04/Aayush-Goel-04/Issue#1582
bump pydantic to 2.1.1
@@ -3,7 +3,8 @@
 ## master (unreleased)

 ### New Features
-- ELF: implement file import and export name extractor #1607 @Aayush-Goel-04
+- ELF: implement file import and export name extractor #1607 #1608 @Aayush-Goel-04
+- bump pydantic from 1.10.9 to 2.1.1 #1582 @Aayush-Goel-04

 ### Breaking Changes

@@ -136,8 +136,8 @@ class Feature(abc.ABC): # noqa: B024
         import capa.features.freeze.features

         return (
-            capa.features.freeze.features.feature_from_capa(self).json()
-            < capa.features.freeze.features.feature_from_capa(other).json()
+            capa.features.freeze.features.feature_from_capa(self).model_dump_json()
+            < capa.features.freeze.features.feature_from_capa(other).model_dump_json()
         )

     def get_name_str(self) -> str:
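Context for the rename above: pydantic v2 moves serialization onto model_* methods, so .dict() and .json() become .model_dump() and .model_dump_json(). A minimal sketch of the two spellings, using a throwaway model rather than a capa class:

    from pydantic import BaseModel

    class Point(BaseModel):  # illustrative model, not part of capa
        x: int
        y: int

    p = Point(x=1, y=2)
    # pydantic v1 spellings (deprecated in v2): p.dict(), p.json()
    # pydantic v2 equivalents used throughout this change:
    assert p.model_dump() == {"x": 1, "y": 2}
    assert p.model_dump_json() == '{"x":1,"y":2}'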
@@ -14,7 +14,7 @@ import logging
 from enum import Enum
 from typing import List, Tuple, Union

-from pydantic import Field, BaseModel
+from pydantic import Field, BaseModel, ConfigDict

 import capa.helpers
 import capa.version
@@ -31,8 +31,7 @@ logger = logging.getLogger(__name__)


 class HashableModel(BaseModel):
-    class Config:
-        frozen = True
+    model_config = ConfigDict(frozen=True)


 class AddressType(str, Enum):
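The recurring pattern in this migration: pydantic v2 replaces the inner class Config with a model_config = ConfigDict(...) attribute. A small sketch of the frozen behavior; the value field is made up for illustration:

    from pydantic import BaseModel, ConfigDict, ValidationError

    class HashableModel(BaseModel):
        model_config = ConfigDict(frozen=True)  # v2 spelling of `class Config: frozen = True`
        value: int = 0                          # illustrative field, not from capa

    m = HashableModel(value=1)
    try:
        m.value = 2  # frozen models reject mutation
    except ValidationError:
        pass
    hash(m)  # frozen=True also keeps instances hashable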
@@ -46,7 +45,7 @@ class AddressType(str, Enum):

 class Address(HashableModel):
     type: AddressType
-    value: Union[int, Tuple[int, int], None]
+    value: Union[int, Tuple[int, int], None] = None # None default value to support deserialization of NO_ADDRESS

     @classmethod
     def from_capa(cls, a: capa.features.address.Address) -> "Address":
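The = None default added above matters under pydantic v2: v1 treated Optional/Union[..., None] fields as implicitly defaulting to None, while v2 considers them required unless a default is given, which would break deserialization of NO_ADDRESS. A hedged sketch with an illustrative model:

    from typing import Optional
    from pydantic import BaseModel, ValidationError

    class Record(BaseModel):         # illustrative, not a capa model
        value: Optional[int] = None  # without `= None` this field is required in v2

    Record()  # ok: value defaults to None

    class StrictRecord(BaseModel):
        value: Optional[int]         # no default: required in v2

    try:
        StrictRecord()               # missing field -> ValidationError
    except ValidationError:
        pass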
@@ -159,9 +158,7 @@ class BasicBlockFeature(HashableModel):
     basic_block: Address = Field(alias="basic block")
     address: Address
     feature: Feature
-
-    class Config:
-        allow_population_by_field_name = True
+    model_config = ConfigDict(populate_by_name=True)


 class InstructionFeature(HashableModel):
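allow_population_by_field_name is renamed populate_by_name in pydantic v2; with it enabled, aliased fields accept either spelling. A minimal sketch with a stand-in model:

    from pydantic import BaseModel, ConfigDict, Field

    class BlockRecord(BaseModel):  # stand-in, not capa's BasicBlockFeature
        model_config = ConfigDict(populate_by_name=True)
        basic_block: str = Field(alias="basic block")

    BlockRecord.model_validate({"basic block": "0x401000"})  # by alias
    BlockRecord(basic_block="0x401000")                      # by field name, enabled by populate_by_name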
@@ -194,26 +191,20 @@ class FunctionFeatures(BaseModel):
     address: Address
     features: Tuple[FunctionFeature, ...]
     basic_blocks: Tuple[BasicBlockFeatures, ...] = Field(alias="basic blocks")
-
-    class Config:
-        allow_population_by_field_name = True
+    model_config = ConfigDict(populate_by_name=True)


 class Features(BaseModel):
     global_: Tuple[GlobalFeature, ...] = Field(alias="global")
     file: Tuple[FileFeature, ...]
     functions: Tuple[FunctionFeatures, ...]
-
-    class Config:
-        allow_population_by_field_name = True
+    model_config = ConfigDict(populate_by_name=True)


 class Extractor(BaseModel):
     name: str
     version: str = capa.version.__version__
-
-    class Config:
-        allow_population_by_field_name = True
+    model_config = ConfigDict(populate_by_name=True)


 class Freeze(BaseModel):
@@ -221,9 +212,7 @@ class Freeze(BaseModel):
     base_address: Address = Field(alias="base address")
     extractor: Extractor
     features: Features
-
-    class Config:
-        allow_population_by_field_name = True
+    model_config = ConfigDict(populate_by_name=True)


 def dumps(extractor: capa.features.extractors.base_extractor.FeatureExtractor) -> str:
@@ -324,7 +313,7 @@ def dumps(extractor: capa.features.extractors.base_extractor.FeatureExtractor) -
     ) # type: ignore
     # Mypy is unable to recognise `base_address` as a argument due to alias

-    return freeze.json()
+    return freeze.model_dump_json()


 def loads(s: str) -> capa.features.extractors.base_extractor.FeatureExtractor:
@@ -8,7 +8,7 @@
 import binascii
 from typing import Union, Optional

-from pydantic import Field, BaseModel
+from pydantic import Field, BaseModel, ConfigDict

 import capa.features.file
 import capa.features.insn
@@ -17,9 +17,7 @@ import capa.features.basicblock


 class FeatureModel(BaseModel):
-    class Config:
-        frozen = True
-        allow_population_by_field_name = True
+    model_config = ConfigDict(frozen=True, populate_by_name=True)

     def to_capa(self) -> capa.features.common.Feature:
         if isinstance(self, OSFeature):
@@ -213,141 +211,141 @@ def feature_from_capa(f: capa.features.common.Feature) -> "Feature":
 class OSFeature(FeatureModel):
     type: str = "os"
     os: str
-    description: Optional[str]
+    description: Optional[str] = None


 class ArchFeature(FeatureModel):
     type: str = "arch"
     arch: str
-    description: Optional[str]
+    description: Optional[str] = None


 class FormatFeature(FeatureModel):
     type: str = "format"
     format: str
-    description: Optional[str]
+    description: Optional[str] = None


 class MatchFeature(FeatureModel):
     type: str = "match"
     match: str
-    description: Optional[str]
+    description: Optional[str] = None


 class CharacteristicFeature(FeatureModel):
     type: str = "characteristic"
     characteristic: str
-    description: Optional[str]
+    description: Optional[str] = None


 class ExportFeature(FeatureModel):
     type: str = "export"
     export: str
-    description: Optional[str]
+    description: Optional[str] = None


 class ImportFeature(FeatureModel):
     type: str = "import"
     import_: str = Field(alias="import")
-    description: Optional[str]
+    description: Optional[str] = None


 class SectionFeature(FeatureModel):
     type: str = "section"
     section: str
-    description: Optional[str]
+    description: Optional[str] = None


 class FunctionNameFeature(FeatureModel):
     type: str = "function name"
     function_name: str = Field(alias="function name")
-    description: Optional[str]
+    description: Optional[str] = None


 class SubstringFeature(FeatureModel):
     type: str = "substring"
     substring: str
-    description: Optional[str]
+    description: Optional[str] = None


 class RegexFeature(FeatureModel):
     type: str = "regex"
     regex: str
-    description: Optional[str]
+    description: Optional[str] = None


 class StringFeature(FeatureModel):
     type: str = "string"
     string: str
-    description: Optional[str]
+    description: Optional[str] = None


 class ClassFeature(FeatureModel):
     type: str = "class"
     class_: str = Field(alias="class")
-    description: Optional[str]
+    description: Optional[str] = None


 class NamespaceFeature(FeatureModel):
     type: str = "namespace"
     namespace: str
-    description: Optional[str]
+    description: Optional[str] = None


 class BasicBlockFeature(FeatureModel):
     type: str = "basic block"
-    description: Optional[str]
+    description: Optional[str] = None


 class APIFeature(FeatureModel):
     type: str = "api"
     api: str
-    description: Optional[str]
+    description: Optional[str] = None


 class PropertyFeature(FeatureModel):
     type: str = "property"
-    access: Optional[str]
+    access: Optional[str] = None
     property: str
-    description: Optional[str]
+    description: Optional[str] = None


 class NumberFeature(FeatureModel):
     type: str = "number"
     number: Union[int, float]
-    description: Optional[str]
+    description: Optional[str] = None


 class BytesFeature(FeatureModel):
     type: str = "bytes"
     bytes: str
-    description: Optional[str]
+    description: Optional[str] = None


 class OffsetFeature(FeatureModel):
     type: str = "offset"
     offset: int
-    description: Optional[str]
+    description: Optional[str] = None


 class MnemonicFeature(FeatureModel):
     type: str = "mnemonic"
     mnemonic: str
-    description: Optional[str]
+    description: Optional[str] = None


 class OperandNumberFeature(FeatureModel):
     type: str = "operand number"
     index: int
     operand_number: int = Field(alias="operand number")
-    description: Optional[str]
+    description: Optional[str] = None


 class OperandOffsetFeature(FeatureModel):
     type: str = "operand offset"
     index: int
     operand_offset: int = Field(alias="operand offset")
-    description: Optional[str]
+    description: Optional[str] = None


 Feature = Union[
@@ -1304,7 +1304,7 @@ class CapaExplorerForm(idaapi.PluginForm):
             idaapi.info("No program analysis to save.")
             return

-        s = self.resdoc_cache.json().encode("utf-8")
+        s = self.resdoc_cache.model_dump_json().encode("utf-8")

         path = Path(self.ask_user_capa_json_file())
         if not path.exists():
@@ -11,4 +11,4 @@ from capa.engine import MatchResults


 def render(meta, rules: RuleSet, capabilities: MatchResults) -> str:
-    return rd.ResultDocument.from_capa(meta, rules, capabilities).json(exclude_none=True)
+    return rd.ResultDocument.from_capa(meta, rules, capabilities).model_dump_json(exclude_none=True)
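exclude_none=True carries over unchanged to model_dump_json(): fields whose value is None are omitted from the output. For example, with a throwaway model:

    from typing import Optional
    from pydantic import BaseModel

    class Item(BaseModel):  # illustrative only
        name: str
        description: Optional[str] = None

    assert Item(name="a").model_dump_json(exclude_none=True) == '{"name":"a"}'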
@@ -126,7 +126,7 @@ def metadata_to_pb2(meta: rd.Metadata) -> capa_pb2.Metadata:
         timestamp=str(meta.timestamp),
         version=meta.version,
         argv=meta.argv,
-        sample=google.protobuf.json_format.ParseDict(meta.sample.dict(), capa_pb2.Sample()),
+        sample=google.protobuf.json_format.ParseDict(meta.sample.model_dump(), capa_pb2.Sample()),
         analysis=capa_pb2.Analysis(
             format=meta.analysis.format,
             arch=meta.analysis.arch,
@@ -393,7 +393,7 @@ def match_to_pb2(match: rd.Match) -> capa_pb2.Match:
 def rule_metadata_to_pb2(rule_metadata: rd.RuleMetadata) -> capa_pb2.RuleMetadata:
     # after manual type conversions to the RuleMetadata, we can rely on the protobuf json parser
     # conversions include tuple -> list and rd.Enum -> proto.enum
-    meta = dict_tuple_to_list_values(rule_metadata.dict())
+    meta = dict_tuple_to_list_values(rule_metadata.model_dump())
     meta["scope"] = scope_to_pb2(meta["scope"])
     meta["attack"] = list(map(dict_tuple_to_list_values, meta.get("attack", [])))
     meta["mbc"] = list(map(dict_tuple_to_list_values, meta.get("mbc", [])))
@@ -7,9 +7,9 @@
 # See the License for the specific language governing permissions and limitations under the License.
 import datetime
 import collections
-from typing import Dict, List, Tuple, Union, Optional
+from typing import Dict, List, Tuple, Union, Literal, Optional

-from pydantic import Field, BaseModel
+from pydantic import Field, BaseModel, ConfigDict

 import capa.rules
 import capa.engine
@@ -23,14 +23,11 @@ from capa.helpers import assert_never


 class FrozenModel(BaseModel):
-    class Config:
-        frozen = True
-        extra = "forbid"
+    model_config = ConfigDict(frozen=True, extra="forbid")


 class Model(BaseModel):
-    class Config:
-        extra = "forbid"
+    model_config = ConfigDict(extra="forbid")


 class Sample(Model):
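extra = "forbid" keeps its meaning under ConfigDict: unknown keys are rejected during validation. A small sketch; the name field is made up for illustration:

    from pydantic import BaseModel, ConfigDict, ValidationError

    class Model(BaseModel):  # mirrors the shape above, with an illustrative field
        model_config = ConfigDict(extra="forbid")
        name: str = ""

    try:
        Model(name="x", unexpected=1)  # extra="forbid" -> ValidationError
    except ValidationError:
        pass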
@@ -105,13 +102,13 @@ class CompoundStatement(StatementModel):


 class SomeStatement(StatementModel):
-    type = "some"
+    type: Literal["some"] = "some"
     description: Optional[str] = None
     count: int


 class RangeStatement(StatementModel):
-    type = "range"
+    type: Literal["range"] = "range"
     description: Optional[str] = None
     min: int
     max: int
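The Literal[...] annotations are needed because pydantic v2 rejects bare assignments such as type = "some" at class-definition time (every field requires a type annotation); Literal keeps the value fixed while leaving it a regular, serializable field. A simplified stand-in without the StatementModel base or description field:

    from typing import Literal
    from pydantic import BaseModel

    class SomeStatement(BaseModel):      # simplified stand-in for the class above
        type: Literal["some"] = "some"   # a bare `type = "some"` raises PydanticUserError in v2
        count: int

    assert SomeStatement(count=2).model_dump() == {"type": "some", "count": 2}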
@@ -119,7 +116,7 @@ class RangeStatement(StatementModel):


 class SubscopeStatement(StatementModel):
-    type = "subscope"
+    type: Literal["subscope"] = "subscope"
     description: Optional[str] = None
     scope: capa.rules.Scope

@@ -134,7 +131,7 @@ Statement = Union[


 class StatementNode(FrozenModel):
-    type = "statement"
+    type: Literal["statement"] = "statement"
     statement: Statement

@@ -171,7 +168,7 @@ def statement_from_capa(node: capa.engine.Statement) -> Statement:


 class FeatureNode(FrozenModel):
-    type = "feature"
+    type: Literal["feature"] = "feature"
     feature: frz.Feature

@@ -500,15 +497,12 @@ class MaecMetadata(FrozenModel):
     malware_family: Optional[str] = Field(None, alias="malware-family")
     malware_category: Optional[str] = Field(None, alias="malware-category")
     malware_category_ov: Optional[str] = Field(None, alias="malware-category-ov")
-
-    class Config:
-        frozen = True
-        allow_population_by_field_name = True
+    model_config = ConfigDict(frozen=True, populate_by_name=True)


 class RuleMetadata(FrozenModel):
     name: str
-    namespace: Optional[str]
+    namespace: Optional[str] = None
     authors: Tuple[str, ...]
     scope: capa.rules.Scope
     attack: Tuple[AttackSpec, ...] = Field(alias="att&ck")
@@ -546,9 +540,7 @@ class RuleMetadata(FrozenModel):
         ) # type: ignore
         # Mypy is unable to recognise arguments due to alias

-    class Config:
-        frozen = True
-        allow_population_by_field_name = True
+    model_config = ConfigDict(frozen=True, populate_by_name=True)


 class RuleMatches(FrozenModel):
@@ -88,7 +88,7 @@ def render_statement(ostream, match: rd.Match, statement: rd.Statement, indent=0
         # so, we have to inline some of the feature rendering here.

         child = statement.child
-        value = child.dict(by_alias=True).get(child.type)
+        value = child.model_dump(by_alias=True).get(child.type)

         if value:
             if isinstance(child, frzf.StringFeature):
@@ -141,7 +141,7 @@ def render_feature(ostream, match: rd.Match, feature: frzf.Feature, indent=0):
         value = feature.class_
     else:
         # convert attributes to dictionary using aliased names, if applicable
-        value = feature.dict(by_alias=True).get(key)
+        value = feature.model_dump(by_alias=True).get(key)

     if value is None:
         raise ValueError(f"{key} contains None")
@@ -48,7 +48,7 @@ dependencies = [
     "pyelftools==0.29",
     "dnfile==0.13.0",
     "dncil==1.0.2",
-    "pydantic==1.10.9",
+    "pydantic==2.1.1",
     "protobuf==4.23.4",
 ]
 dynamic = ["version"]
@@ -144,8 +144,7 @@ def get_capa_results(args):
     meta.analysis.layout = capa.main.compute_layout(rules, extractor, capabilities)

     doc = rd.ResultDocument.from_capa(meta, rules, capabilities)
-
-    return {"path": path, "status": "ok", "ok": doc.dict(exclude_none=True)}
+    return {"path": path, "status": "ok", "ok": doc.model_dump()}


 def main(argv=None):
@@ -214,7 +213,9 @@ def main(argv=None):
         if result["status"] == "error":
             logger.warning(result["error"])
         elif result["status"] == "ok":
-            results[result["path"].as_posix()] = rd.ResultDocument.parse_obj(result["ok"]).json(exclude_none=True)
+            results[result["path"].as_posix()] = rd.ResultDocument.model_validate(result["ok"]).model_dump_json(
+                exclude_none=True
+            )
         else:
             raise ValueError(f"unexpected status: {result['status']}")

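parse_obj becomes model_validate in pydantic v2; both build a model from an already-decoded Python object, here the dict handed back by the worker process. A rough sketch of that round trip with a toy model standing in for ResultDocument:

    from pydantic import BaseModel

    class Doc(BaseModel):  # toy stand-in for rd.ResultDocument
        path: str

    payload = Doc(path="sample.exe").model_dump()  # plain dict, safe to pass between processes
    restored = Doc.model_validate(payload)         # v2 replacement for Doc.parse_obj(payload)
    print(restored.model_dump_json(exclude_none=True))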
@@ -78,7 +78,7 @@ def main(argv=None):
     rdpb.ParseFromString(pb)

     rd = capa.render.proto.doc_from_pb2(rdpb)
-    print(rd.json(exclude_none=True, indent=2, sort_keys=True))
+    print(rd.model_dump_json(exclude_none=True, indent=2))


 if __name__ == "__main__":
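model_dump_json() accepts indent but has no sort_keys parameter, which is why that flag disappears above. If deterministically sorted keys were still wanted, one option (not part of this change) is to re-serialize through the stdlib json module:

    import json
    from pydantic import BaseModel

    def dump_sorted(model: BaseModel) -> str:
        # hypothetical helper: round-trip through stdlib json to get sorted keys back
        return json.dumps(json.loads(model.model_dump_json(exclude_none=True)), indent=2, sort_keys=True)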
@@ -236,7 +236,7 @@ def test_basic_block_node_from_capa():
 def assert_round_trip(rd: rdoc.ResultDocument):
     one = rd

-    doc = one.json(exclude_none=True)
+    doc = one.model_dump_json(exclude_none=True)
     two = rdoc.ResultDocument.parse_raw(doc)

     # show the round trip works
@@ -244,14 +244,14 @@ def assert_round_trip(rd: rdoc.ResultDocument):
     # which works thanks to pydantic model equality.
     assert one == two
     # second by showing their json representations are the same.
-    assert one.json(exclude_none=True) == two.json(exclude_none=True)
+    assert one.model_dump_json(exclude_none=True) == two.model_dump_json(exclude_none=True)

     # now show that two different versions are not equal.
     three = copy.deepcopy(two)
     three.meta.__dict__.update({"version": "0.0.0"})
     assert one.meta.version != three.meta.version
     assert one != three
-    assert one.json(exclude_none=True) != three.json(exclude_none=True)
+    assert one.model_dump_json(exclude_none=True) != three.model_dump_json(exclude_none=True)


 @pytest.mark.parametrize(
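One loose end visible in this test: ResultDocument.parse_raw(doc) still works under pydantic 2.x but is deprecated; the v2-native counterpart is model_validate_json. A sketch of the equivalent round trip (not part of this diff), using a toy model:

    from pydantic import BaseModel

    class Doc(BaseModel):  # toy model for illustration
        version: str

    one = Doc(version="6.0.0")
    text = one.model_dump_json(exclude_none=True)
    two = Doc.model_validate_json(text)  # v2 replacement for Doc.parse_raw(text)
    assert one == two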