Skip to content

Commit

Permalink
Merge branch 'release-0.30'
Browse files Browse the repository at this point in the history
  • Loading branch information
k0retux committed Mar 1, 2023
2 parents fd2c4d0 + 9e96885 commit 4779b21
Show file tree
Hide file tree
Showing 94 changed files with 12,302 additions and 5,069 deletions.
3 changes: 2 additions & 1 deletion .gitignore
Expand Up @@ -2,4 +2,5 @@
*.pyc
*~
.idea
.DS_Store
.DS_Store
venv
25 changes: 25 additions & 0 deletions .readthedocs.yaml
@@ -0,0 +1,25 @@
# .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

# Required
version: 2

# Set the version of Python and other tools you might need
build:
os: ubuntu-22.04
tools:
python: "3.10"

# Build documentation in the docs/ directory with Sphinx
sphinx:
configuration: docs/source/conf.py

# If using Sphinx, optionally build your docs in additional formats such as PDF
formats:
- pdf

# Optionally declare the Python requirements required to build your docs
python:
install:
- requirements: docs/requirements.txt
12 changes: 8 additions & 4 deletions README.rst
Expand Up @@ -69,15 +69,17 @@ of ``fuddly`` itself. Usage is as follows:

Miscellaneous
-------------
+ Don't forget to populate ``~/fuddly_data/imported_data/`` with sample files for data
+ Don't forget to populate ``<fuddly data folder>/imported_data/`` with sample files for data
models that need it

+ Note that when the fuddly shell is launched, the path of the fuddly data folder is displayed as
well as its configuration folder

Dependencies
------------
+ Compatible with Python2 and Python3
+ Compatible with Python3
+ Mandatory:

- `six`_: Python 2/3 compatibility
- `sqlite3`_: SQLite3 data base

+ Optional:
Expand All @@ -88,11 +90,12 @@ Dependencies
- `serial`_: For serial port access
- `cups`_: Python bindings for libcups
- `rpyc`_: Remote Python Call (RPyC), a transparent and symmetric RPC library
- `pyxdg`_: XDG Base Directory support

+ For testing:

- `ddt`_: Used for data-driven tests
- `mock`_: Used for mocking (only needed in Python2)
- `mock`_: Used for mocking

+ For documentation generation:

Expand All @@ -113,3 +116,4 @@ Dependencies
.. _sphinx: http://sphinx-doc.org/
.. _texlive: https://www.tug.org/texlive/
.. _readthedocs theme: https://github.com/snide/sphinx_rtd_theme
.. _pyxdg: https://pypi.org/project/pyxdg/
7 changes: 2 additions & 5 deletions TODO
@@ -1,11 +1,8 @@
[NEW FEATURES]

- Add new IA infrastructure supporting the creation of data models (automatic discovery of data structure from raw data)
- Enhance current post-analysis tooling and add new features supporting investigation (diagrams, statistics, research by pattern, etc.)
- Add GDB/PIN/QEMU probes/managers
- Add FmkDB visualization tools
- Add support for automatic adaptation of fuzz test cases depending on
specific Target meta-data (HW architecture, programming language, ...)
- Implement new node types that leverage python-constraint, or a more
powerful constraint programming library

[ENHANCEMENT]

Expand Down
10 changes: 5 additions & 5 deletions data_models/file_formats/jpg.py
Expand Up @@ -50,8 +50,8 @@ class JPG_DataModel(DataModel):
name = 'jpg'

def _atom_absorption_additional_actions(self, atom):
x = atom['.*/SOF_hdr/X'].get_raw_value()
y = atom['.*/SOF_hdr/Y'].get_raw_value()
x = atom['.*/SOF_hdr/X'][0].get_raw_value()
y = atom['.*/SOF_hdr/Y'][0].get_raw_value()
d_priv = {'height':y, 'width':x}
atom.set_private(d_priv)
msg = "add private data: size [x:{:d}, y:{:d}]".format(x, y)
Expand Down Expand Up @@ -163,9 +163,9 @@ def build_data_model(self):

jpg_abs = jpg.get_clone(new_env=True)
jpg_abs.set_current_conf('ABS', recursive=True)
self.register_atom_for_absorption(jpg_abs,
absorb_constraints=AbsNoCsts(size=True, struct=True,
contents=True))
self.register_atom_for_decoding(jpg_abs,
absorb_constraints=AbsNoCsts(size=True, struct=True,
content=True))


data_model = JPG_DataModel()
Expand Down
29 changes: 29 additions & 0 deletions data_models/file_formats/json_dm.py
@@ -0,0 +1,29 @@
import json

from framework.data_model import *
from framework.value_types import *
from framework.dmhelpers.json import json_model_builder, json_builder

class JSON_DataModel(DataModel):
    """Data model for JSON files.

    Atoms are created on-the-fly from imported ``.json`` files: when the
    sample contains a ``$schema`` key it is treated as a JSON Schema and
    modeled with :func:`json_model_builder`; otherwise the sample itself is
    modeled with :func:`json_builder`.
    """

    name = 'json'
    file_extension = 'json'

    def _create_atom_from_raw_data_specific(self, data, idx, filename):
        """Create a node/atom from raw JSON data.

        Args:
            data: raw JSON content (bytes or str) to parse.
            idx: index of the imported file (unused here).
            filename: name of the imported file; used to derive the node name.

        Returns:
            The node built from the schema or from the sample.

        Raises:
            json.JSONDecodeError: if *data* is not valid JSON.
        """
        json_data = json.loads(data)
        # Strip the '.json' extension (and its leading dot) from the filename.
        node_name = 'json_' + filename[:-len(self.file_extension)-1]
        # Guard with isinstance: a scalar or list sample would make the
        # `in` test raise TypeError with the original bare check.
        if isinstance(json_data, dict) and '$schema' in json_data:
            try:
                return json_model_builder(node_name=node_name, schema=json_data,
                                          ignore_pattern=False)
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not swallowed.
            except Exception:
                print('\n*** WARNING: Node creation attempt failed. New attempt, but now ignore '
                      'regex patterns from string JSON types.')
                return json_model_builder(node_name=node_name, schema=json_data,
                                          ignore_pattern=True)
        else:
            return json_builder(node_name=node_name, sample=json_data)

    def build_data_model(self):
        # Nothing to register statically: atoms come from imported files via
        # _create_atom_from_raw_data_specific().
        pass


data_model = JSON_DataModel()
3 changes: 3 additions & 0 deletions data_models/file_formats/json_dm_strategy.py
@@ -0,0 +1,3 @@
from framework.tactics_helpers import *

# Empty Tactics registry for the JSON data model: no disruptors/generators
# are defined yet, but the framework expects this module-level object.
tactics = Tactics()
4 changes: 2 additions & 2 deletions data_models/file_formats/pdf.py
Expand Up @@ -1310,9 +1310,9 @@ def build_data_model(self):
with open(gr.workspace_folder + 'TEST_FUZZING_PDF-orig' + '.pdf', 'wb') as f:
f.write(val)

leaf0 = pdf.get_node_by_path('PDF.*leaf_0-0$').to_bytes()
leaf0 = pdf.get_first_node_by_path('PDF.*leaf_0-0$').to_bytes()
pdf.set_current_conf('ALT', root_regexp='PDF.*leaf_0-0$')
leaf1 = pdf.get_node_by_path('PDF.*leaf_0-0$').to_bytes()
leaf1 = pdf.get_first_node_by_path('PDF.*leaf_0-0$').to_bytes()

print(leaf0)
print(leaf1)
Expand Down
2 changes: 1 addition & 1 deletion data_models/file_formats/png.py
Expand Up @@ -106,7 +106,7 @@ def build_data_model(self):
png = mb.create_graph_from_desc(png_desc)

self.register(png)
self.register_atom_for_absorption(png, absorb_constraints=AbsNoCsts(size=True))
self.register_atom_for_decoding(png, absorb_constraints=AbsNoCsts(size=True))


data_model = PNG_DataModel()
Expand Down
4 changes: 2 additions & 2 deletions data_models/file_formats/zip.py
Expand Up @@ -321,8 +321,8 @@ def build_data_model(self):

pkzip_abs = pkzip.get_clone(new_env=True)
pkzip_abs.set_current_conf('ABS', recursive=True)
self.register_atom_for_absorption(pkzip_abs,
absorb_constraints=AbsNoCsts(size=True, struct=True))
self.register_atom_for_decoding(pkzip_abs,
absorb_constraints=AbsNoCsts(size=True, struct=True))

data_model = ZIP_DataModel()

28 changes: 14 additions & 14 deletions data_models/protocols/pppoe.py
Expand Up @@ -35,10 +35,10 @@ def build_data_model(self):

def cycle_tags(tag):
tag.freeze()
if tag['.*/type'].get_current_raw_val() == 0x102:
tag['.*/type'].unfreeze()
if tag['.*/type'][0].get_current_raw_val() == 0x102:
tag['.*/type'][0].unfreeze()
tag.freeze()
tag['.*/type'].unfreeze()
tag['.*/type'][0].unfreeze()
tag.unfreeze(reevaluate_constraints=True)
return tag

Expand Down Expand Up @@ -124,18 +124,18 @@ def cycle_tags(tag):
tag_node = mb.create_graph_from_desc(tag_desc)

tag_service_name = tag_node.get_clone('tag_sn')
tag_service_name['.*/type'].set_values(value_type=UINT16_be(values=[0x0101]))
tag_service_name['.*/type'][0].set_values(value_type=UINT16_be(values=[0x0101]))

tag_host_uniq = tag_node.get_clone('tag_host_uniq')
tag_host_uniq['.*/type'].set_values(value_type=UINT16_be(values=[0x0103]))
tag_host_uniq['.*/type'][0].set_values(value_type=UINT16_be(values=[0x0103]))

tag_host_uniq_pads = tag_host_uniq.get_clone()

tag_ac_name = tag_node.get_clone('tag_ac_name') # Access Concentrator Name
tag_ac_name['.*/type'].set_values(value_type=UINT16_be(values=[0x0102]))
tag_ac_name['.*/type'][0].set_values(value_type=UINT16_be(values=[0x0102]))

tag_sn_error = tag_node.get_clone('tag_sn_error') # Service Name Error
tag_sn_error['.*/type'].set_values(value_type=UINT16_be(values=[0x0202]))
tag_sn_error['.*/type'][0].set_values(value_type=UINT16_be(values=[0x0202]))

tag_service_name_pads = tag_service_name.get_clone()
tag_node_pads = tag_node.get_clone()
Expand Down Expand Up @@ -258,21 +258,21 @@ def cycle_tags(tag):
# pppoe_msg.make_random(recursive=True)

padi = pppoe_msg.get_clone('padi')
padi['.*/mac_dst'].set_values(value_type=String(values=[u'\xff\xff\xff\xff\xff\xff']))
padi['.*/code'].set_values(value_type=UINT8(values=[0x9]))
padi['.*/mac_dst'][0].set_values(value_type=String(values=[u'\xff\xff\xff\xff\xff\xff']))
padi['.*/code'][0].set_values(value_type=UINT8(values=[0x9]))

pado = pppoe_msg.get_clone('pado')
pado['.*/code'].set_values(value_type=UINT8(values=[0x7]))
# pado['.*/code'].clear_attr(MH.Attr.Mutable)
pado['.*/code'][0].set_values(value_type=UINT8(values=[0x7]))
# pado['.*/code'][0].clear_attr(MH.Attr.Mutable)

padr = pppoe_msg.get_clone('padr')
padr['.*/code'].set_values(value_type=UINT8(values=[0x19]))
padr['.*/code'][0].set_values(value_type=UINT8(values=[0x19]))

pads = pppoe_msg.get_clone('pads')
pads['.*/code'].set_values(value_type=UINT8(values=[0x65]))
pads['.*/code'][0].set_values(value_type=UINT8(values=[0x65]))

padt = pppoe_msg.get_clone('padt')
padt['.*/code'].set_values(value_type=UINT8(values=[0xa7]))
padt['.*/code'][0].set_values(value_type=UINT8(values=[0xa7]))

self.register(pppoe_msg, padi, pado, padr, pads, padt, tag_host_uniq)

Expand Down
23 changes: 12 additions & 11 deletions data_models/protocols/pppoe_strategy.py
Expand Up @@ -26,6 +26,7 @@
from framework.global_resources import *
from framework.data_model import MH
from framework.target_helpers import *
from framework.data import DataProcess

tactics = Tactics()

Expand All @@ -43,7 +44,7 @@ def retrieve_X_from_feedback(env, current_step, next_step, feedback, x='padi', u
elif x == 'padr':
if current_step.content is not None:
mac_src = current_step.content['.*/mac_src']
env.mac_src = mac_src
env.mac_src = mac_src[0] if mac_src is not None else None
else:
mac_src = env.mac_src
if mac_src is not None:
Expand Down Expand Up @@ -71,8 +72,8 @@ def retrieve_X_from_feedback(env, current_step, next_step, feedback, x='padi', u

if result[0] == AbsorbStatus.FullyAbsorbed:
try:
service_name = msg_x['.*/value/v101'].to_bytes()
mac_src = msg_x['.*/mac_src'].to_bytes()
service_name = msg_x['.*/value/v101'][0].to_bytes()
mac_src = msg_x['.*/mac_src'][0].to_bytes()
except:
continue
print(' [ {:s} received! ]'.format(x.upper()))
Expand All @@ -82,16 +83,16 @@ def retrieve_X_from_feedback(env, current_step, next_step, feedback, x='padi', u

host_uniq = msg_x['.*/value/v103']
if host_uniq is not None:
host_uniq = host_uniq.to_bytes()
host_uniq = host_uniq[0].to_bytes()
env.host_uniq = host_uniq
t_fix_pppoe_msg_fields.host_uniq = host_uniq

if update: # we update the seed of the data process
next_step.content.freeze()
try:
next_step.content['.*/tag_sn/value/v101'] = service_name
next_step.content['.*/tag_sn$'].unfreeze(recursive=True, reevaluate_constraints=True)
next_step.content['.*/tag_sn$'].freeze()
next_step.content['.*/tag_sn$'][0].unfreeze(recursive=True, reevaluate_constraints=True)
next_step.content['.*/tag_sn$'][0].freeze()
except:
pass

Expand Down Expand Up @@ -134,7 +135,7 @@ def disrupt_data(self, dm, target, prev_data):
n['.*/mac_dst'] = self.mac_src
prev_data.add_info("update 'mac_src'")
if not self.reevaluate_csts:
n['.*/mac_dst'].unfreeze(dont_change_state=True)
n['.*/mac_dst'][0].unfreeze(dont_change_state=True)
except:
print(error_msg.format('mac_dst'))
else:
Expand All @@ -152,11 +153,11 @@ def disrupt_data(self, dm, target, prev_data):

if self.host_uniq:
try:
if not n['.*/tag_host_uniq/.*/v103'].is_attr_set(MH.Attr.LOCKED) and \
not n['.*/tag_host_uniq/len'].is_attr_set(MH.Attr.LOCKED) and \
not n['.*/tag_host_uniq/type'].is_attr_set(MH.Attr.LOCKED):
if not n['.*/tag_host_uniq/.*/v103'][0].is_attr_set(MH.Attr.LOCKED) and \
not n['.*/tag_host_uniq/len'][0].is_attr_set(MH.Attr.LOCKED) and \
not n['.*/tag_host_uniq/type'][0].is_attr_set(MH.Attr.LOCKED):
n['.*/tag_host_uniq/.*/v103'] = self.host_uniq
tag_uniq = n['.*/tag_host_uniq$']
tag_uniq = n['.*/tag_host_uniq$'][0]
tag_uniq.unfreeze(recursive=True, reevaluate_constraints=True)
tag_uniq.freeze()
prev_data.add_info("update 'host_uniq' with: {!s}".format(self.host_uniq))
Expand Down
2 changes: 1 addition & 1 deletion data_models/protocols/usb.py
Expand Up @@ -104,7 +104,7 @@ def build_data_model(self):
{'name': 'wMaxPacketSize',
'contents': BitField(subfield_limits=[11,13,16],
subfield_val_extremums=[None,[0,2],[0,0]],
subfield_values=[[2**x for x in range(1,12)],None,[0]],
subfield_values=[[2**x for x in range(1,11)],None,[0]],
endian=VT.LittleEndian),
'random': True,
'alt': [
Expand Down

0 comments on commit 4779b21

Please sign in to comment.