Skip to content

Commit

Permalink
Merge branch 'release-0.25'
Browse files Browse the repository at this point in the history
  • Loading branch information
k0retux committed Aug 15, 2016
2 parents aced12d + 46cfc3f commit 8f9ebf1
Show file tree
Hide file tree
Showing 47 changed files with 5,098 additions and 3,016 deletions.
14 changes: 7 additions & 7 deletions README.rst
Expand Up @@ -49,22 +49,22 @@ About documentation
Launch fuddly test cases
------------------------

The file ``framework/test.py`` include all unit & integration test cases
The package ``test`` includes all unit & integration test cases
of ``fuddly`` itself. Usage is as follows:

- To launch all the test, issue the command::
- To launch all the tests, issue the command::

>> python framework/test.py -a
>> python -m test -a

- To launch all the test but the longer ones, issue the command::
- To launch all the tests but the longer ones, issue the command::

>> python framework/test.py
>> python -m test

- To avoid data model specific test cases use the option ``--ignore-dm-specifics``

- To launch a specific test category issue the folowing command::
- To launch a specific test category issue the following command::

>> python framework/test.py <Test_Class>.<test_method>
>> python -m test test.<test_package>.<test_module>.<Test_Class>.<test_method>


Miscellaneous
Expand Down
9 changes: 8 additions & 1 deletion TODO
Expand Up @@ -5,9 +5,16 @@
- Add GDB/PIN/QEMU probes/managers
- Add support for evolutionary fuzzing
- Add FmkDB visualization tools
- Add support for automatic adaptation of fuzz test cases depending on
specific Target meta-data (HW architecture, programming language, ...)
- Implement new node types that leverage python-constraint, or a more
  powerful constraint programming library

[ENHANCEMENT]

- Clean up test.py
- Add support for absorption of nodes leveraging the 'Bitfield collapse'
customization (i.e., absorption of bit-oriented nodes without a byte boundary).
  (Counterpart of the feature already supported for generation.)
- Add support for absorption of nodes whose existence has not been resolved yet.
  (Counterpart of the feature already supported for generation.)
- Clean up test/test_integration.py
42 changes: 21 additions & 21 deletions data_models/example.py
Expand Up @@ -60,9 +60,9 @@ def build_data_model(self):
kv.add_conf('ALT')
kv.set_values(tux_subparts_3, conf='ALT')

tux_subparts_4 = ['[\xc2]PLIP', '[\xc2]GLOUP']
tux_subparts_4 = [u'[\u00c2]PLIP', u'[\u00c2]GLOUP']
ku.add_conf('ALT')
ku.set_values(tux_subparts_4, conf='ALT')
ku.set_values(value_type=String(values=tux_subparts_4, codec='utf8'), conf='ALT')

idx = Node('IDX')
idx.set_values(value_type=SINT16_be(mini=4,maxi=40))
Expand Down Expand Up @@ -132,9 +132,9 @@ def build_data_model(self):
concat.set_func(fct, tux)

if sys.version_info[0] > 2:
fct = lambda x: b'___' + bytes(chr(x[1]), 'latin_1') + b'___'
fct = lambda x: b'___' + bytes(chr(x[1]), internal_repr_codec) + b'___'
else:
fct = lambda x: b'___' + bytes(x[1]) + b'___'
fct = lambda x: b'___' + x[1] + b'___'

concat.add_conf('ALT')
concat.set_func(fct, tux, conf='ALT')
Expand All @@ -146,7 +146,7 @@ def build_data_model(self):


evt1 = Node('EVT1')
evt1.set_values(value_type=SINT16_be(int_list=[-4]))
evt1.set_values(value_type=SINT16_be(values=[-4]))
evt1.set_fuzz_weight(10)

evt2 = Node('EVT2')
Expand Down Expand Up @@ -177,7 +177,7 @@ def build_data_model(self):
prefix.make_determinist()

te3 = Node('EVT3')
te3.set_values(value_type=BitField(subfield_sizes=[4,4], subfield_val_lists=[[0x5, 0x6], [0xF, 0xC]]))
te3.set_values(value_type=BitField(subfield_sizes=[4,4], subfield_values=[[0x5, 0x6], [0xF, 0xC]]))
te3.set_fuzz_weight(8)
# te3.make_determinist()

Expand All @@ -187,12 +187,12 @@ def build_data_model(self):
# te4.make_determinist()

te5 = Node('EVT5')
te5.set_values(value_type=INT_str(int_list=[9]))
te5.set_values(value_type=INT_str(values=[9]))
te5.cc.set_specific_fuzzy_values([666])
te5.set_fuzz_weight(6)

te6 = Node('EVT6')
vt = BitField(subfield_limits=[2,6,8,10], subfield_val_lists=[[4,2,1],[2,15,16,3],[2,3,0],[1]],
vt = BitField(subfield_limits=[2,6,8,10], subfield_values=[[4,2,1],[2,15,16,3],[2,3,0],[1]],
padding=0, lsb_padding=True, endian=VT.LittleEndian)
te6.set_values(value_type=vt)
te6.set_fuzz_weight(5)
Expand All @@ -201,7 +201,7 @@ def build_data_model(self):

te7 = Node('EVT7')
vt = BitField(subfield_sizes=[4,4,4],
subfield_val_lists=[[4,2,1], None, [2,3,0]],
subfield_values=[[4,2,1], None, [2,3,0]],
subfield_val_extremums=[None, [3, 15], None],
padding=0, lsb_padding=False, endian=VT.BigEndian)
te7.set_values(value_type=vt)
Expand All @@ -227,10 +227,10 @@ def build_data_model(self):
# Simple

tval1_bottom = Node('TV1_bottom')
vt = UINT16_be(int_list=[1,2,3,4,5,6])
vt = UINT16_be(values=[1,2,3,4,5,6])

# vt = BitField(subfield_sizes=[4,4,4],
# subfield_val_lists=[[4,2,1], None, [10,12,13]],
# subfield_values=[[4,2,1], None, [10,12,13]],
# subfield_val_extremums=[None, [14, 15], None],
# padding=0, lsb_padding=False, endian=VT.BigEndian)

Expand All @@ -241,7 +241,7 @@ def build_data_model(self):
sep_bottom_alt = Node('sep_bottom_alt', values=[' ;; '])

tval2_bottom = Node('TV2_bottom')
vt = UINT16_be(int_list=[0x42,0x43,0x44])
vt = UINT16_be(values=[0x42,0x43,0x44])
tval2_bottom.set_values(value_type=vt)

alt_tag = Node('AltTag', values=[' |AltTag| ', ' +AltTag+ '])
Expand Down Expand Up @@ -270,7 +270,7 @@ def build_data_model(self):
], conf='ALT')

tval2_bottom3 = Node('TV2_bottom3')
vt = UINT32_be(int_list=[0xF, 0x7])
vt = UINT32_be(values=[0xF, 0x7])
tval2_bottom3.set_values(value_type=vt)
bottom3 = Node('Bottom_3_NT')
bottom3.set_subnodes_with_csts([
Expand Down Expand Up @@ -307,7 +307,7 @@ def build_data_model(self):
### NonTerm

e = Node('TV2')
vt = UINT16_be(int_list=[1,2,3,4,5,6])
vt = UINT16_be(values=[1,2,3,4,5,6])
e.set_values(value_type=vt)
sep3 = Node('sep3', values=[' # '])
nt = Node('Bottom_NT')
Expand All @@ -319,7 +319,7 @@ def build_data_model(self):
sep2 = Node('sep2', values=[' -|#|- '])

e_val1 = Node('V1', values=['A', 'B', 'C'])
e_typedval1 = Node('TV1', value_type=UINT16_be(int_list=[1,2,3,4,5,6]))
e_typedval1 = Node('TV1', value_type=UINT16_be(values=[1,2,3,4,5,6]))
e_val2 = Node('V2', values=['X', 'Y', 'Z'])
e_val3 = Node('V3', values=['<', '>'])

Expand All @@ -346,7 +346,7 @@ def build_data_model(self):
'contents': [

{'contents': BitField(subfield_sizes=[21,2,1], endian=VT.BigEndian,
subfield_val_lists=[None, [0b10], [0,1]],
subfield_values=[None, [0b10], [0,1]],
subfield_val_extremums=[[500, 600], None, None]),
'name': 'val1',
'qty': (1, 5)},
Expand All @@ -357,13 +357,13 @@ def build_data_model(self):
'custo_set': MH.Custo.NTerm.FrozenCopy,
'custo_clear': MH.Custo.NTerm.MutableClone,
'separator': {'contents': {'name': 'sep',
'contents': String(val_list=['\n'], absorb_regexp=b'\n+'),
'contents': String(values=['\n'], absorb_regexp='\n+'),
'absorb_csts': AbsNoCsts(regexp=True)}},
'contents': [{
'section_type': MH.Random,
'contents': [

{'contents': String(val_list=['OK', 'KO'], size=2),
{'contents': String(values=['OK', 'KO'], size=2),
'name': 'val2'},

{'name': 'val21',
Expand All @@ -385,7 +385,7 @@ def build_data_model(self):
'sync_qty_with': 'val1',
'alt': [
{'conf': 'alt1',
'contents': SINT8(int_list=[1,4,8])},
'contents': SINT8(values=[1,4,8])},
{'conf': 'alt2',
'contents': UINT16_be(mini=0xeeee, maxi=0xff56),
'determinist': True}]}
Expand All @@ -395,10 +395,10 @@ def build_data_model(self):
{'section_type': MH.Pick,
'weights': (10,5),
'contents': [
{'contents': String(val_list=['PLIP', 'PLOP'], size=4),
{'contents': String(values=['PLIP', 'PLOP'], size=4),
'name': ('val21', 2)},

{'contents': SINT16_be(int_list=[-1, -3, -5, 7]),
{'contents': SINT16_be(values=[-1, -3, -5, 7]),
'name': ('val22', 2)}
]}
]}
Expand Down
2 changes: 1 addition & 1 deletion data_models/example_strategy.py
Expand Up @@ -81,7 +81,7 @@ class t_fuzz_tve_01(Disruptor):

def disrupt_data(self, dm, target, prev_data):

val = b"NEW_" + rand_string(mini=5, maxi=10, str_set='XYZRVW')
val = b"NEW_" + rand_string(mini=5, maxi=10, str_set='XYZRVW').encode('latin-1')

if prev_data.node:
prev_data.node.get_node_by_path('TVE.*EVT1$').set_frozen_value(val)
Expand Down
7 changes: 4 additions & 3 deletions data_models/file_formats/jpg.py
Expand Up @@ -24,6 +24,7 @@
from framework.data_model import *
from framework.data_model_helpers import *
from framework.value_types import *
from framework.global_resources import *

markers = {
'SOF': {0: 0xFFC0,
Expand Down Expand Up @@ -83,7 +84,7 @@ def build_data_model(self):
{'name': 'SOF_hdr',
'contents': [
{'name': 'F_marker',
'contents': UINT16_be(int_list=[m for m in markers['SOF'].values()])},
'contents': UINT16_be(values=[m for m in markers['SOF'].values()])},
{'name': 'Lf',
'contents': MH.LEN(vt=UINT16_be, base_len=8),
'node_args': 'F_CompGroup',
Expand All @@ -92,7 +93,7 @@ def build_data_model(self):
'contents': UINT16_be()}
]},
{'name': 'P',
'contents': UINT8(int_list=[8,12])},
'contents': UINT8(values=[8,12])},
{'name': 'Y',
'contents': UINT16_be(maxi=65535),
'specific_fuzzy_vals': [65500]},
Expand Down Expand Up @@ -128,7 +129,7 @@ def build_data_model(self):
{'name': 'SOS_hdr',
'contents': [
{'name': 'S_marker',
'contents': UINT16_be(int_list=[markers['SOS']])},
'contents': UINT16_be(values=[markers['SOS']])},
{'name': 'Ls',
'contents': MH.LEN(vt=UINT16_be, base_len=6),
'node_args': 'S_CompGroup',
Expand Down
32 changes: 16 additions & 16 deletions data_models/file_formats/pdf.py
Expand Up @@ -157,7 +157,7 @@ def get_number(name, int_m=0, int_M=2**40, dec_m=0, dec_M=2**20, enforce_unsigne

int_part = Node('int_part', value_type=INT_str(mini=int_m, maxi=int_M, determinist=False))
int_part.add_conf('ALT')
int_part.set_values(value_type=INT_str(int_list=[20000000]), conf='ALT')
int_part.set_values(value_type=INT_str(values=[20000000]), conf='ALT')

dot = Node('dot', values=['.'])
val = Node('val', value_type=INT_str(mini=dec_m, maxi=dec_M, determinist=False))
Expand Down Expand Up @@ -425,14 +425,14 @@ def _encode_stream_ascii(stream, enc_stream):
prefix = ["/Filter "])

def gen_length_func(e_stream):
return Node('length', value_type=INT_str(int_list=[len(e_stream.to_bytes())]))
return Node('length', value_type=INT_str(values=[len(e_stream.to_bytes())]))

if use_generator_func:
e_length = Node('length_wrapper')
e_length.set_generator_func(gen_length_func, func_node_arg=e_stream)
e_length.customize(GenFuncCusto(items_to_set=GenFuncCusto.CloneExtNodeArgs))
else:
e_length = Node('length', value_type=INT_str(int_list=[len(e_stream.to_bytes())]))
e_length = Node('length', value_type=INT_str(values=[len(e_stream.to_bytes())]))

e_length_entry = make_wrapped_node('E_Length',
node = e_length,
Expand Down Expand Up @@ -487,7 +487,7 @@ def get_jpg(name):

xobj_id = e_jpg_xobj.get_private()
e_resources_internals = make_wrapped_node('IMG_XObj_resource_' + name,
node=Node("xobj_id", value_type=INT_str(int_list=[xobj_id])),
node=Node("xobj_id", value_type=INT_str(values=[xobj_id])),
prefix=["<< /ProcSet [/PDF /ImageC]\n /XObject << /Im1 "],
suffix=[" 0 R >> >>"])
e_resources = PDFObj.create_wrapped_obj('IMG_XObj_resource_' + name, e_resources_internals)
Expand Down Expand Up @@ -522,14 +522,14 @@ def make_page_node(name, page_node_id, kids_id=[4444], parent_id=None, count=Non

cpt = count if count is not None else len(l)

e_count_nb = Node("count", value_type=INT_str(int_list=[cpt]))
e_count_nb = Node("count", value_type=INT_str(values=[cpt]))
e_count = make_wrapped_node("Count_E",
node=e_count_nb,
prefix=["/Count "],
suffix=["\n"])

if parent_id is not None:
e_parent_id = Node("parent_id", value_type=INT_str(int_list=[parent_id]))
e_parent_id = Node("parent_id", value_type=INT_str(values=[parent_id]))
e_parent = make_wrapped_node("Parent_E",
node=e_parent_id,
prefix=["/Parent "],
Expand All @@ -554,7 +554,7 @@ def make_page_leaf(name, parent_id=4444, resources_id=4444, contents_id=4444,

e_prefix = Node('prefix', values=["<<\n"])

e_parent_id = Node("parent_id", value_type=INT_str(int_list=[parent_id]))
e_parent_id = Node("parent_id", value_type=INT_str(values=[parent_id]))
e_parent = make_wrapped_node("Parent_E",
node=e_parent_id,
prefix=["/Parent "],
Expand All @@ -576,13 +576,13 @@ def make_page_leaf(name, parent_id=4444, resources_id=4444, contents_id=4444,
prefix=["/MediaBox "],
suffix=["\n"])

e_resources_id = Node("resource_id", value_type=INT_str(int_list=[resources_id]))
e_resources_id = Node("resource_id", value_type=INT_str(values=[resources_id]))
e_resources = make_wrapped_node("Resources_E",
node=e_resources_id,
prefix=["/Resources "],
suffix=[" 0 R\n"])

e_contents_id = Node("contents_id", value_type=INT_str(int_list=[contents_id]))
e_contents_id = Node("contents_id", value_type=INT_str(values=[contents_id]))
e_contents = make_wrapped_node("Contents_E",
node=e_contents_id,
prefix=["/Contents "],
Expand Down Expand Up @@ -759,7 +759,7 @@ def _generate_pdf_body(pdf_contents, args):
node_list = obj_list + pagetree_objs

e_raw_catalog = make_wrapped_node("Catalog",
node=Node("pagetree_id", value_type=INT_str(int_list=[pagetree_id])),
node=Node("pagetree_id", value_type=INT_str(values=[pagetree_id])),
prefix=["<<\n/Pages "],
suffix=[" 0 R\n/Type /Catalog\n>>"])
e_catalog = PDFObj.create_wrapped_obj("Catalog", e_raw_catalog)
Expand Down Expand Up @@ -794,15 +794,15 @@ def _generate_xref(objs):
# node_list last Node is the catalog
catalog_id = catalog.get_private()

val_list = list(map(lambda x: x.to_bytes(), node_list))
values = list(map(lambda x: x.to_bytes(), node_list))
sorted_node_list = sorted(node_list, key=lambda x: x.get_private())

nb_objs = len(node_list) + 1 # we have to count the object 0

off = header_len

objs_offset = {}
for v, e in zip(val_list, node_list):
for v, e in zip(values, node_list):
obj_len = len(v)
objs_offset[e] = off
off += obj_len
Expand Down Expand Up @@ -845,15 +845,15 @@ def _generate_xref_loop(objs):
# node_list last Node is the catalog
catalog_id = catalog.get_private()

val_list = list(map(lambda x: x.to_bytes(), node_list))
values = list(map(lambda x: x.to_bytes(), node_list))
sorted_node_list = sorted(node_list, key=lambda x: x.get_private())

nb_objs = len(node_list) + 1 # we have to count the object 0

off = header_len

objs_offset = {}
for v, e in zip(val_list, node_list):
for v, e in zip(values, node_list):
obj_len = len(v)
objs_offset[e] = off
off += obj_len
Expand Down Expand Up @@ -1081,7 +1081,7 @@ def change_kids_id(self, kids_id, count_update=None):
self.e_kids_id.set_subnodes_basic(rawlist)

if count_update is not None:
self.e_count_nb.set_values(value_type=INT_str(int_list=[count_update]))
self.e_count_nb.set_values(value_type=INT_str(values=[count_update]))


class PageLeaf(object):
Expand All @@ -1096,7 +1096,7 @@ def get_id(self):
return self.e_leaf.get_private()

def set_parent_id(self, pid):
self.e_parent_id.set_values(value_type=INT_str(int_list=[pid]))
self.e_parent_id.set_values(value_type=INT_str(values=[pid]))

def set_actions(self, subnodes=None, vals=None):
if subnodes is not None:
Expand Down

0 comments on commit 8f9ebf1

Please sign in to comment.