diff --git a/lib/hachoir_core/__init__.py b/lib/hachoir_core/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..df1988fda2f02ba08dfed6d6009dc19df68b0c7b
--- /dev/null
+++ b/lib/hachoir_core/__init__.py
@@ -0,0 +1,2 @@
+from hachoir_core.version import VERSION as __version__, PACKAGE, WEBSITE, LICENSE
+
diff --git a/lib/hachoir_core/benchmark.py b/lib/hachoir_core/benchmark.py
new file mode 100644
index 0000000000000000000000000000000000000000..f823cfae1038d844d5e498ec5c68aeffc22ff671
--- /dev/null
+++ b/lib/hachoir_core/benchmark.py
@@ -0,0 +1,210 @@
+from hachoir_core.tools import humanDurationNanosec
+from hachoir_core.i18n import _
+from math import floor
+from time import time
+
class BenchmarkError(Exception):
    """
    Error raised on an internal benchmark failure.
    Use str(err) to format it as a string.
    """
    def __init__(self, message):
        text = "Benchmark internal error: %s" % message
        Exception.__init__(self, text)
+
class BenchmarkStat:
    """
    Collect benchmark timings: store every appended value and keep the
    minimum, maximum and sum up to date incrementally.

    Methods:
    - append(value): append a value
    - getMin(): minimum value
    - getMax(): maximum value
    - getSum(): sum of all values
    - __len__(): get number of elements
    - __nonzero__(): isn't empty?
    """
    def __init__(self):
        self._values = []

    def append(self, value):
        self._values.append(value)
        if len(self._values) == 1:
            # First value: initialize the running statistics
            self._min = value
            self._max = value
            self._sum = value
        else:
            if value < self._min:
                self._min = value
            if self._max < value:
                self._max = value
            self._sum += value

    def __len__(self):
        return len(self._values)

    def __nonzero__(self):
        # Python 2 truth protocol: non-empty means true
        return bool(self._values)

    def getMin(self):
        return self._min

    def getMax(self):
        return self._max

    def getSum(self):
        return self._sum
+
class Benchmark:
    """
    Run a function repeatedly, collect timings in a BenchmarkStat and
    display min/average/max/total statistics on stdout.
    """
    def __init__(self, max_time=5.0,
    min_count=5, max_count=None, progress_time=1.0):
        """
        Constructor:
        - max_time: Maximum wanted duration of the whole benchmark
          (default: 5 seconds, minimum: 1 second).
        - min_count: Minimum number of function calls to get good statistics
          (default: 5, minimum: 1).
        - progress_time: Time between each "progress" message
          (default: 1 second, minimum: 250 ms).
        - max_count: Maximum number of function calls (default: no limit).
        - verbose: Is verbose? (default: False)
        - disable_gc: Disable garbage collector? (default: False)
        """
        # Clamp user-supplied values to sane minimums
        self.max_time = max(max_time, 1.0)
        self.min_count = max(min_count, 1)
        self.max_count = max_count
        self.progress_time = max(progress_time, 0.25)
        self.verbose = False
        self.disable_gc = False

    def formatTime(self, value):
        """
        Format a time delta (in seconds) to string: use humanDurationNanosec()
        """
        return humanDurationNanosec(value * 1000000000)

    def displayStat(self, stat):
        """
        Display statistics (a BenchmarkStat) to stdout:
        - best time (minimum)
        - average time (arithmetic average)
        - worst time (maximum)
        - total time (sum)

        Use arithmetic average instead of geometric average because
        geometric fails if any value is zero (returns zero) and also
        because floating point multiplication loses precision with many
        values.
        """
        average = stat.getSum() / len(stat)
        values = (stat.getMin(), average, stat.getMax(), stat.getSum())
        values = tuple(self.formatTime(value) for value in values)
        print _("Benchmark: best=%s  average=%s  worst=%s  total=%s") \
            % values

    def _runOnce(self, func, args, kw):
        # Time a single call of func(*args, **kw); returns elapsed seconds
        before = time()
        func(*args, **kw)
        after = time()
        return after - before

    def _run(self, func, args, kw):
        """
        Call func(*args, **kw) as many times as needed to get
        good statistics. Algorithm:
        - call the function once
        - compute needed number of calls
        - and then call function N times

        To compute number of calls, parameters are:
        - time of first function call
        - minimum number of calls (min_count attribute)
        - maximum test time (max_time attribute)

        Notice: The function will approximate number of calls.

        Returns the populated BenchmarkStat.
        """
        # First call of the benchmark
        stat = BenchmarkStat()
        diff = self._runOnce(func, args, kw)
        best = diff
        stat.append(diff)
        total_time = diff

        # Compute needed number of calls from the first call's duration
        count = int(floor(self.max_time / diff))
        count = max(count, self.min_count)
        if self.max_count:
            count = min(count, self.max_count)

        # No other call needed? Just exit with the single measurement
        if count == 1:
            return stat
        estimate = diff * count
        if self.verbose:
            print _("Run benchmark: %s calls (estimate: %s)") \
                % (count, self.formatTime(estimate))

        # Only show progress when verbose and the run is long enough (>= 1s)
        display_progress = self.verbose and (1.0 <= estimate)
        total_count = 1
        while total_count < count:
            # Run benchmark and display each result
            if display_progress:
                print _("Result %s/%s: %s  (best: %s)") % \
                    (total_count, count,
                    self.formatTime(diff), self.formatTime(best))
            part = count - total_count

            # Would the remaining calls take longer than progress_time?
            # If so, only run enough calls to fill one progress interval.
            average = total_time / total_count
            if self.progress_time < part * average:
                part = max( int(self.progress_time / average), 1)
            for index in xrange(part):
                diff = self._runOnce(func, args, kw)
                stat.append(diff)
                total_time += diff
                best = min(diff, best)
            total_count += part
        if display_progress:
            print _("Result %s/%s: %s  (best: %s)") % \
                (count, count,
                self.formatTime(diff), self.formatTime(best))
        return stat

    def validateStat(self, stat):
        """
        Check statistics and raise a BenchmarkError if they are invalid.
        Example of tests: reject empty stat, reject stat with only nul values.
        """
        if not stat:
            raise BenchmarkError("empty statistics")
        if not stat.getSum():
            raise BenchmarkError("nul statistics")

    def run(self, func, *args, **kw):
        """
        Run function func(*args, **kw), validate statistics,
        and display the result on stdout.

        Disable garbage collector if asked to.
        """

        # Disable garbage collector if requested and if the gc module exists
        # (Jython 2.2 doesn't have it for example)
        if self.disable_gc:
            try:
                import gc
            except ImportError:
                self.disable_gc = False
        if self.disable_gc:
            gc_enabled = gc.isenabled()
            gc.disable()
        else:
            gc_enabled = False

        # Run the benchmark
        # NOTE(review): if _run() raises, gc is left disabled — consider
        # wrapping in try/finally to restore the collector state.
        stat = self._run(func, args, kw)
        if gc_enabled:
            gc.enable()

        # Validate and display stats
        self.validateStat(stat)
        self.displayStat(stat)
+
diff --git a/lib/hachoir_core/bits.py b/lib/hachoir_core/bits.py
new file mode 100644
index 0000000000000000000000000000000000000000..d5b31a018b5e3d1bb61e9a6be8874f11414352d1
--- /dev/null
+++ b/lib/hachoir_core/bits.py
@@ -0,0 +1,277 @@
+"""
+Utilities to convert integers and binary strings to binary (number), binary
+string, number, hexadecimal, etc.
+"""
+
+from hachoir_core.endian import BIG_ENDIAN, LITTLE_ENDIAN
+from hachoir_core.compatibility import reversed
+from itertools import chain, repeat
+from struct import calcsize, unpack, error as struct_error
+
def swap16(value):
    """
    Swap byte between big and little endian of a 16 bits integer.

    >>> "%x" % swap16(0x1234)
    '3412'
    """
    low = value & 0xFF
    high = value >> 8
    return (low << 8) | high
+
+def swap32(value):
+    """
+    Swap byte between big and little endian of a 32 bits integer.
+
+    >>> "%x" % swap32(0x12345678)
+    '78563412'
+    """
+    value = long(value)
+    return ((value & 0x000000FFL) << 24) \
+         | ((value & 0x0000FF00L) << 8) \
+         | ((value & 0x00FF0000L) >> 8) \
+         | ((value & 0xFF000000L) >> 24)
+
def bin2long(text, endian):
    """
    Convert a binary number written in a string into an integer.
    Characters other than "0" and "1" are skipped.

    >>> bin2long("110", BIG_ENDIAN)
    6
    >>> bin2long("110", LITTLE_ENDIAN)
    3
    >>> bin2long("11 00", LITTLE_ENDIAN)
    3
    """
    assert endian in (LITTLE_ENDIAN, BIG_ENDIAN)
    digits = [ ord(character) - ord("0")
        for character in text if character in "01" ]
    assert len(digits) != 0
    # Little endian stores the least significant bit first: reverse
    # before accumulating most-significant-first.
    if endian is not BIG_ENDIAN:
        digits.reverse()
    value = 0
    for digit in digits:
        value = (value << 1) | digit
    return value
+
+def str2hex(value, prefix="", glue=u"", format="%02X"):
+    r"""
+    Convert binary string in hexadecimal (base 16).
+
+    >>> str2hex("ABC")
+    u'414243'
+    >>> str2hex("\xF0\xAF", glue=" ")
+    u'F0 AF'
+    >>> str2hex("ABC", prefix="0x")
+    u'0x414243'
+    >>> str2hex("ABC", format=r"\x%02X")
+    u'\\x41\\x42\\x43'
+    """
+    if isinstance(glue, str):
+        glue = unicode(glue)
+    if 0 < len(prefix):
+        text = [prefix]
+    else:
+        text = []
+    for character in value:
+        text.append(format % ord(character))
+    return glue.join(text)
+
def countBits(value):
    """
    Count number of bits needed to store a (positive) integer number.

    >>> countBits(0)
    1
    >>> countBits(1000)
    10
    >>> countBits(44100)
    16
    >>> countBits(18446744073709551615)
    64
    """
    assert 0 <= value
    # Shift the value right one bit at a time; the number of shifts needed
    # to reach 0 or 1 plus one is the bit count (0 and 1 both need one bit).
    count = 1
    while 2 <= value:
        value >>= 1
        count += 1
    return count
+
def byte2bin(number, classic_mode=True):
    """
    Convert a byte (integer in 0..255 range) to a binary string.
    If classic_mode is true (default value), bits are written most
    significant first; otherwise least significant first.

    >>> byte2bin(10)
    '00001010'
    >>> byte2bin(10, False)
    '01010000'
    """
    bits = []
    for position in range(8):
        if classic_mode:
            shift = 7 - position
        else:
            shift = position
        if (number >> shift) & 1:
            bits.append("1")
        else:
            bits.append("0")
    return "".join(bits)
+
def long2raw(value, endian, size=None):
    r"""
    Convert a number (positive and not nul) to a raw string.
    If size is given, add nul bytes to fill to size bytes.

    >>> long2raw(0x1219, BIG_ENDIAN)
    '\x12\x19'
    >>> long2raw(0x1219, BIG_ENDIAN, 4)   # 32 bits
    '\x00\x00\x12\x19'
    >>> long2raw(0x1219, LITTLE_ENDIAN, 4)   # 32 bits
    '\x19\x12\x00\x00'
    """
    assert (not size and 0 < value) or (0 <= value)
    assert endian in (LITTLE_ENDIAN, BIG_ENDIAN)
    # Collect bytes least significant first
    bytes_le = []
    while value != 0:
        bytes_le.append( chr(value & 0xFF) )
        value >>= 8
    # Compute how many nul bytes of padding are needed
    if size:
        pad = max(size - len(bytes_le), 0)
    else:
        pad = 0
    if endian is BIG_ENDIAN:
        bytes_le.reverse()
        return "\0" * pad + "".join(bytes_le)
    else:
        return "".join(bytes_le) + "\0" * pad
+
def long2bin(size, value, endian, classic_mode=False):
    """
    Convert a number into bits (in a string):
    - size: size in bits of the number
    - value: positive (or nul) number
    - endian: BIG_ENDIAN (most important bit first)
      or LITTLE_ENDIAN (least important bit first)
    - classic_mode (default: False): reverse each packet of 8 bits

    >>> long2bin(16, 1+4 + (1+8)*256, BIG_ENDIAN)
    '10100000 10010000'
    >>> long2bin(16, 1+4 + (1+8)*256, BIG_ENDIAN, True)
    '00000101 00001001'
    >>> long2bin(16, 1+4 + (1+8)*256, LITTLE_ENDIAN)
    '00001001 00000101'
    >>> long2bin(16, 1+4 + (1+8)*256, LITTLE_ENDIAN, True)
    '10010000 10100000'
    """
    assert endian in (LITTLE_ENDIAN, BIG_ENDIAN)
    assert 0 <= value
    # Emit bits least significant first
    bits = []
    for index in xrange(size):
        if value & 1:
            bits.append("1")
        else:
            bits.append("0")
        value >>= 1
    text = "".join(bits)
    if endian is LITTLE_ENDIAN:
        text = text[::-1]
    # Split into 8-bit groups separated by spaces, optionally reversing
    # each group (classic_mode)
    groups = []
    while text:
        group = text[:8]
        text = text[8:]
        if classic_mode:
            group = group[::-1]
        groups.append(group)
    return " ".join(groups)
+
def str2bin(value, classic_mode=True):
    r"""
    Convert binary string to binary numbers.
    If classic_mode is true (default value), bits are written most
    significant first (see byte2bin()).

    >>> str2bin("\x03\xFF")
    '00000011 11111111'
    >>> str2bin("\x03\xFF", False)
    '11000000 11111111'
    """
    return " ".join(
        byte2bin(ord(character), classic_mode)
        for character in value)
+
def _createStructFormat():
    """
    Create a dictionary (endian, size_byte) => struct format used
    by str2long() to convert raw data to positive integer.
    """
    formats = {
        BIG_ENDIAN:    {},
        LITTLE_ENDIAN: {},
    }
    for code in "BHILQ":
        # Some interpreters don't support every format code (e.g. "Q")
        try:
            nbytes = calcsize(code)
        except struct_error:
            continue
        formats[BIG_ENDIAN][nbytes] = '>%s' % code
        formats[LITTLE_ENDIAN][nbytes] = '<%s' % code
    return formats
_struct_format = _createStructFormat()
+
def str2long(data, endian):
    r"""
    Convert a raw data (type 'str') into a long integer.

    >>> chr(str2long('*', BIG_ENDIAN))
    '*'
    >>> str2long("\x00\x01\x02\x03", BIG_ENDIAN) == 0x10203
    True
    >>> str2long("\x2a\x10", LITTLE_ENDIAN) == 0x102a
    True
    >>> str2long("\xff\x14\x2a\x10", BIG_ENDIAN) == 0xff142a10
    True
    >>> str2long("\x00\x01\x02\x03", LITTLE_ENDIAN) == 0x3020100
    True
    >>> str2long("\xff\x14\x2a\x10\xab\x00\xd9\x0e", BIG_ENDIAN) == 0xff142a10ab00d90e
    True
    >>> str2long("\xff\xff\xff\xff\xff\xff\xff\xff", BIG_ENDIAN) == (2**64-1)
    True
    """
    assert 1 <= len(data) <= 32   # arbitrary limit: 256 bits
    # Fast path: use struct.unpack() when a native format exists for
    # this (endian, length) combination
    try:
        return unpack(_struct_format[endian][len(data)], data)[0]
    except KeyError:
        pass

    # Slow path: accumulate bytes manually, least significant first
    assert endian in (BIG_ENDIAN, LITTLE_ENDIAN)
    if endian is BIG_ENDIAN:
        data = reversed(data)
    value = 0
    shift = 0
    for character in data:
        value += (ord(character) << shift)
        shift += 8
    return value
+
diff --git a/lib/hachoir_core/cmd_line.py b/lib/hachoir_core/cmd_line.py
new file mode 100644
index 0000000000000000000000000000000000000000..8c4178df0a66e04d6720687d9b0e7f7e6a9ca77a
--- /dev/null
+++ b/lib/hachoir_core/cmd_line.py
@@ -0,0 +1,43 @@
+from optparse import OptionGroup
+from hachoir_core.log import log
+from hachoir_core.i18n import _, getTerminalCharset
+from hachoir_core.tools import makePrintable
+import hachoir_core.config as config
+
def getHachoirOptions(parser):
    """
    Create an option group (type optparse.OptionGroup) of Hachoir
    library options.
    """
    def setLogFilename(*args):
        # optparse callback signature: (option, opt_str, value, parser)
        log.setFilename(args[2])

    group = OptionGroup(parser, _("Hachoir library"),
        "Configure Hachoir library")
    group.add_option("--verbose", help=_("Verbose mode"),
        default=False, action="store_true")
    group.add_option("--log", help=_("Write log in a file"),
        type="string", action="callback", callback=setLogFilename)
    group.add_option("--quiet", help=_("Quiet mode (don't display warning)"),
        default=False, action="store_true")
    group.add_option("--debug", help=_("Debug mode"),
        default=False, action="store_true")
    return group
+
def configureHachoir(option):
    """
    Configure the Hachoir library from an optparse option object:
    copy the quiet/verbose/debug flags into hachoir_core.config.
    Flags that are false are left untouched so defaults set elsewhere
    are preserved.
    """
    # Fixed: body used 2-space indentation, inconsistent with the
    # 4-space convention used everywhere else in this file.
    if option.quiet:
        config.quiet = True
    if option.verbose:
        config.verbose = True
    if option.debug:
        config.debug = True
+
def unicodeFilename(filename, charset=None):
    """
    Decode a byte-string filename to unicode using the given charset
    (defaults to the terminal charset). Fall back to a printable
    approximation when decoding fails.
    """
    charset = charset or getTerminalCharset()
    try:
        return unicode(filename, charset)
    except UnicodeDecodeError:
        return makePrintable(filename, charset, to_unicode=True)
+
diff --git a/lib/hachoir_core/compatibility.py b/lib/hachoir_core/compatibility.py
new file mode 100644
index 0000000000000000000000000000000000000000..caff43cf6f5bd18a29d112df5352de57850d53f0
--- /dev/null
+++ b/lib/hachoir_core/compatibility.py
@@ -0,0 +1,185 @@
+"""
+Compatibility constants and functions. This module works on Python 1.5 to 2.5.
+
+This module provides:
+- True and False constants ;
+- any() and all() function ;
+- has_yield and has_slice values ;
+- isinstance() with Python 2.3 behaviour ;
+- reversed() and sorted() function.
+
+
+True and False constants
+========================
+
+Truth constants: True is yes (one) and False is no (zero).
+
+>>> int(True), int(False)     # int value
+(1, 0)
+>>> int(False | True)         # and binary operator
+1
+>>> int(True & False)         # or binary operator
+0
+>>> int(not(True) == False)   # not binary operator
+1
+
+Warning: on Python smaller than 2.3, True and False are aliases to
+number 1 and 0. So "print True" will displays 1 and not True.
+
+
+any() function
+==============
+
+any() returns True if at least one items is True, or False otherwise.
+
+>>> any([False, True])
+True
+>>> any([True, True])
+True
+>>> any([False, False])
+False
+
+
+all() function
+==============
+
+all() returns True if all items are True, or False otherwise.
+This function is just apply binary and operator (&) on all values.
+
+>>> all([True, True])
+True
+>>> all([False, True])
+False
+>>> all([False, False])
+False
+
+
+has_yield boolean
+=================
+
has_yield: boolean which indicates if the interpreter supports the yield
keyword. The yield keyword is available since Python 2.0.
+
+
has_slice boolean
=================
+
+has_slice: boolean which indicates if the interpreter supports slices with step
+argument or not. slice with step is available since Python 2.3.
+
+
+reversed() and sorted() function
+================================
+
The reversed() and sorted() functions were introduced in Python 2.4.
They should return a generator, but in this module the result may be a list.
+
+>>> data = list("cab")
+>>> list(sorted(data))
+['a', 'b', 'c']
+>>> list(reversed("abc"))
+['c', 'b', 'a']
+"""
+
+import copy
+import operator
+
# --- True and False constants from Python 2.0                ---
# --- Warning: for Python < 2.3, they are aliases for 1 and 0 ---
try:
    # Recent Python: the names already exist; re-assigning them to
    # themselves only checks their presence.
    True = True
    False = False
except NameError:
    # Very old Python: define them as plain integers 1 and 0.
    True = 1
    False = 0
+
# --- any() from Python 2.5 ---
try:
    from __builtin__ import any
except ImportError:
    def any(items):
        """
        Return True if at least one item is true, False otherwise
        (fallback for Python < 2.5).
        """
        result = False
        for item in items:
            if item:
                result = True
                break
        return result
+
# ---all() from Python 2.5 ---
try:
    from __builtin__ import all
except ImportError:
    def all(items):
        """
        Return True if all items are true (or if items is empty),
        mirroring the Python 2.5 builtin.
        """
        # BUGFIX: the old fallback used reduce(operator.__and__, items),
        # a *bitwise* and: all([2, 1]) evaluated to (2 & 1) == 0 (wrong),
        # and an empty sequence raised TypeError instead of returning True.
        for item in items:
            if not item:
                return False
        return True
+
# --- test if interpreter supports yield keyword ---
try:
    # Compile and run a tiny generator: on interpreters without the
    # yield keyword, compile() raises SyntaxError; on interpreters where
    # the generator misbehaves, the KeyError below is raised.
    eval(compile("""
from __future__ import generators

def gen():
    yield 1
    yield 2

if list(gen()) != [1, 2]:
    raise KeyError("42")
""", "<string>", "exec"))
except (KeyError, SyntaxError):
    # Either the keyword is unsupported (SyntaxError) or the generator
    # produced wrong values (KeyError): treat both as "no yield".
    has_yield = False
else:
    has_yield = True
+
# --- test if interpreter supports slices (with step argument) ---
try:
    # eval() is used so that interpreters without extended-slice syntax
    # raise at parse time instead of failing to import this module.
    has_slice = eval('"abc"[::-1] == "cba"')
except (TypeError, SyntaxError):
    has_slice = False
+
+# --- isinstance with isinstance Python 2.3 behaviour (arg 2 is a type) ---
+try:
+    if isinstance(1, int):
+        from __builtin__ import isinstance
+except TypeError:
+    print "Redef isinstance"
+    def isinstance20(a, typea):
+        if type(typea) != type(type):
+            raise TypeError("TypeError: isinstance() arg 2 must be a class, type, or tuple of classes and types")
+        return type(typea) != typea
+    isinstance = isinstance20
+
# --- reversed() from Python 2.4 ---
try:
    from __builtin__ import reversed
except ImportError:
    # (Removed an old commented-out generator-based variant: dead code.)
    if has_slice:
        def reversed(data):
            """
            Return a reversed copy of data (extended-slice fallback).
            """
            if not isinstance(data, list):
                data = list(data)
            return data[::-1]
    else:
        def reversed(data):
            """
            Return a reversed copy of data (index-loop fallback for
            interpreters without extended slices).
            """
            if not isinstance(data, list):
                data = list(data)
            reversed_data = []
            for index in xrange(len(data)-1, -1, -1):
                reversed_data.append(data[index])
            return reversed_data
+
# --- sorted() from Python 2.4 ---
try:
    from __builtin__ import sorted
except ImportError:
    def sorted(data):
        """
        Return a sorted copy of data (fallback for Python < 2.4).
        """
        sorted_data = copy.copy(data)
        sorted_data.sort()
        # BUGFIX: the old code returned the `sorted` function object
        # itself instead of the sorted copy.
        return sorted_data
+
+__all__ = ("True", "False",
+    "any", "all", "has_yield", "has_slice",
+    "isinstance", "reversed", "sorted")
+
diff --git a/lib/hachoir_core/config.py b/lib/hachoir_core/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..9250b627651c82819173afcc12687427d8c0e9c6
--- /dev/null
+++ b/lib/hachoir_core/config.py
@@ -0,0 +1,29 @@
+"""
+Configuration of Hachoir
+"""
+
+import os
+
+# UI: display options
+max_string_length = 40    # Max. length in characters of GenericString.display
+max_byte_length = 14      # Max. length in bytes of RawBytes.display
+max_bit_length = 256      # Max. length in bits of RawBits.display
+unicode_stdout = True     # Replace stdout and stderr with Unicode compatible objects
+                          # Disable it for readline or ipython
+
+# Global options
+debug = False             # Display many informations usefull to debug
+verbose = False           # Display more informations
+quiet = False             # Don't display warnings
+
+# Use internationalization and localization (gettext)?
+if os.name == "nt":
+    # TODO: Remove this hack and make i18n works on Windows :-)
+    use_i18n = False
+else:
+    use_i18n = True
+
+# Parser global options
+autofix = True            # Enable Autofix? see hachoir_core.field.GenericFieldSet
+check_padding_pattern = True   # Check padding fields pattern?
+
diff --git a/lib/hachoir_core/dict.py b/lib/hachoir_core/dict.py
new file mode 100644
index 0000000000000000000000000000000000000000..f887683c78416acd4ac264f345723f9ed124235c
--- /dev/null
+++ b/lib/hachoir_core/dict.py
@@ -0,0 +1,183 @@
+"""
+Dictionnary classes which store values order.
+"""
+
+from hachoir_core.error import HachoirError
+from hachoir_core.i18n import _
+
class UniqKeyError(HachoirError):
    """
    Error raised when a value is set whereas the key already exists in a
    dictionary (see Dict.append() and Dict.insert()).
    """
    pass
+
class Dict(object):
    """
    Ordered dictionary: works like classic Python dict() but preserves
    insertion order, and __iter__() iterates over the dictionary
    *values* (not the keys like Python's dict does).
    """
    def __init__(self, values=None):
        # Internal representation: parallel lists indexed by position,
        # plus a key -> position lookup table.
        self._index = {}        # key => index
        self._key_list = []     # index => key
        self._value_list = []   # index => value
        if values:
            for key, value in values:
                self.append(key, value)

    def _getValues(self):
        return self._value_list
    # Values in insertion order (read-only property)
    values = property(_getValues)

    def index(self, key):
        """
        Search a value by its key and return its index.
        Returns None if the key doesn't exist.

        >>> d=Dict( (("two", "deux"), ("one", "un")) )
        >>> d.index("two")
        0
        >>> d.index("one")
        1
        >>> d.index("three") is None
        True
        """
        return self._index.get(key)

    def __getitem__(self, key):
        """
        Get item with specified key.
        To get a value by its index, use mydict.values[index]

        >>> d=Dict( (("two", "deux"), ("one", "un")) )
        >>> d["one"]
        'un'
        """
        return self._value_list[self._index[key]]

    def __setitem__(self, key, value):
        # Replace the value of an existing key; raises KeyError for an
        # unknown key (use append() to add a new key).
        self._value_list[self._index[key]] = value

    def append(self, key, value):
        """
        Append a new (key, value) pair.
        Raises UniqKeyError if the key already exists.
        """
        if key in self._index:
            raise UniqKeyError(_("Key '%s' already exists") % key)
        self._index[key] = len(self._value_list)
        self._key_list.append(key)
        self._value_list.append(value)

    def __len__(self):
        return len(self._value_list)

    def __contains__(self, key):
        return key in self._index

    def __iter__(self):
        # Iterate over *values*, in insertion order
        return iter(self._value_list)

    def iteritems(self):
        """
        Create a generator to iterate on: (key, value).

        >>> d=Dict( (("two", "deux"), ("one", "un")) )
        >>> for key, value in d.iteritems():
        ...    print "%r: %r" % (key, value)
        ...
        'two': 'deux'
        'one': 'un'
        """
        for index in xrange(len(self)):
            yield (self._key_list[index], self._value_list[index])

    def itervalues(self):
        """
        Create an iterator on values
        """
        return iter(self._value_list)

    def iterkeys(self):
        """
        Create an iterator on keys
        """
        return iter(self._key_list)

    def replace(self, oldkey, newkey, new_value):
        """
        Replace an existing value with another one

        >>> d=Dict( (("two", "deux"), ("one", "un")) )
        >>> d.replace("one", "three", 3)
        >>> d
        {'two': 'deux', 'three': 3}

        You can also use the classic form:

        >>> d['three'] = 4
        >>> d
        {'two': 'deux', 'three': 4}
        """
        index = self._index[oldkey]
        self._value_list[index] = new_value
        if oldkey != newkey:
            del self._index[oldkey]
            self._index[newkey] = index
            self._key_list[index] = newkey

    def __delitem__(self, index):
        """
        Delete item at position index. May raise IndexError.

        >>> d=Dict( ((6, 'six'), (9, 'neuf'), (4, 'quatre')) )
        >>> del d[1]
        >>> d
        {6: 'six', 4: 'quatre'}
        """
        if index < 0:
            index += len(self._value_list)
        if not (0 <= index < len(self._value_list)):
            raise IndexError(_("list assignment index out of range (%s/%s)")
                % (index, len(self._value_list)))
        del self._value_list[index]
        del self._key_list[index]

        # First loop removes the key pointing at the deleted position;
        # it breaks right after the deletion because the dict must not
        # be iterated further once modified.
        for key, item_index in self._index.iteritems():
            if item_index == index:
                del self._index[key]
                break

        # Second loop shifts down the positions of all following entries
        # (only values are updated, so iterating here is safe).
        for key, item_index in self._index.iteritems():
            if index < item_index:
                self._index[key] -= 1

    def insert(self, index, key, value):
        """
        Insert an item at specified position index.

        >>> d=Dict( ((6, 'six'), (9, 'neuf'), (4, 'quatre')) )
        >>> d.insert(1, '40', 'quarante')
        >>> d
        {6: 'six', '40': 'quarante', 9: 'neuf', 4: 'quatre'}
        """
        if key in self:
            # BUGFIX: message used to read "ready exists"
            raise UniqKeyError(_("Insert error: key '%s' already exists") % key)
        _index = index
        if index < 0:
            index += len(self._value_list)
        if not(0 <= index <= len(self._value_list)):
            raise IndexError(_("Insert error: index '%s' is invalid") % _index)
        # Shift the positions of all entries at or after the insert point
        for item_key, item_index in self._index.iteritems():
            if item_index >= index:
                self._index[item_key] += 1
        self._index[key] = index
        self._key_list.insert(index, key)
        self._value_list.insert(index, value)

    def __repr__(self):
        items = ( "%r: %r" % (key, value) for key, value in self.iteritems() )
        return "{%s}" % ", ".join(items)
+
diff --git a/lib/hachoir_core/endian.py b/lib/hachoir_core/endian.py
new file mode 100644
index 0000000000000000000000000000000000000000..5f6ae88b415490224f552699171c8f63e62efa1b
--- /dev/null
+++ b/lib/hachoir_core/endian.py
@@ -0,0 +1,15 @@
+"""
+Constant values about endian.
+"""
+
+from hachoir_core.i18n import _
+
+BIG_ENDIAN = "ABCD"
+LITTLE_ENDIAN = "DCBA"
+NETWORK_ENDIAN = BIG_ENDIAN
+
+endian_name = {
+    BIG_ENDIAN: _("Big endian"),
+    LITTLE_ENDIAN: _("Little endian"),
+}
+
diff --git a/lib/hachoir_core/error.py b/lib/hachoir_core/error.py
new file mode 100644
index 0000000000000000000000000000000000000000..9ec6b57382ecf0805a40180f563554510d57631b
--- /dev/null
+++ b/lib/hachoir_core/error.py
@@ -0,0 +1,45 @@
+"""
+Functions to display an error (error, warning or information) message.
+"""
+
+from hachoir_core.log import log
+from hachoir_core.tools import makePrintable
+import sys, traceback
+
def getBacktrace(empty="Empty backtrace."):
    """
    Try to get the current exception backtrace as a string.
    Returns `empty` when there is no pending exception, or
    "Error while trying to get backtrace" on failure.
    """
    try:
        trace = traceback.format_exception(*sys.exc_info())
        sys.exc_clear()
        if trace[0] != "None\n":
            return "".join(trace)
    except:
        # No i18n here (imagine if i18n function calls error...)
        return "Error while trying to get backtrace"
    return empty
+
class HachoirError(Exception):
    """
    Parent of all errors in Hachoir library.
    The original (possibly unicode) message is kept in `self.text`;
    the Exception argument is an ASCII-printable version of it.
    """
    def __init__(self, message):
        printable = makePrintable(message, "ASCII")
        Exception.__init__(self, printable)
        self.text = message

    def __unicode__(self):
        return self.text
+
# Error classes which may be raised by Hachoir core
# FIXME: Add EnvironmentError (IOError or OSError) and AssertionError?
# FIXME: Remove ArithmeticError and RuntimeError?
HACHOIR_ERRORS = (HachoirError, LookupError, NameError, AttributeError,
    TypeError, ValueError, ArithmeticError, RuntimeError)

# Convenience aliases to the shared logger's message functions
info    = log.info
warning = log.warning
error   = log.error
diff --git a/lib/hachoir_core/event_handler.py b/lib/hachoir_core/event_handler.py
new file mode 100644
index 0000000000000000000000000000000000000000..80f474b72266838f6299df9359a370648d3acd3d
--- /dev/null
+++ b/lib/hachoir_core/event_handler.py
@@ -0,0 +1,26 @@
class EventHandler(object):
    """
    Map event names to lists of handler callables and dispatch events
    to every registered handler.
    """

    def __init__(self):
        # event_name => list of handler callables
        self.handlers = {}

    def connect(self, event_name, handler):
        """
        Register a handler for event_name, appending it to any handlers
        already registered for that event.
        """
        handlers = self.handlers.setdefault(event_name, [])
        handlers.append(handler)

    def raiseEvent(self, event_name, *args):
        """
        Raise an event: call each handler registered for event_name
        with *args. Events with no handler are silently ignored.
        """
        for handler in self.handlers.get(event_name, ()):
            handler(*args)
+
diff --git a/lib/hachoir_core/field/__init__.py b/lib/hachoir_core/field/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..f313c9ba11ff92476ba436cf2528a34b4a7ec486
--- /dev/null
+++ b/lib/hachoir_core/field/__init__.py
@@ -0,0 +1,59 @@
+# Field classes
+from hachoir_core.field.field import Field, FieldError, MissingField, joinPath
+from hachoir_core.field.bit_field import Bit, Bits, RawBits
+from hachoir_core.field.byte_field import Bytes, RawBytes
+from hachoir_core.field.sub_file import SubFile, CompressedField
+from hachoir_core.field.character import Character
+from hachoir_core.field.integer import (
+    Int8,  Int16,  Int24,  Int32,  Int64,
+    UInt8, UInt16, UInt24, UInt32, UInt64,
+    GenericInteger)
+from hachoir_core.field.enum import Enum
+from hachoir_core.field.string_field import (GenericString,
+    String, CString, UnixLine,
+    PascalString8, PascalString16, PascalString32)
+from hachoir_core.field.padding import (PaddingBits, PaddingBytes,
+    NullBits, NullBytes)
+
+# Functions
+from hachoir_core.field.helper import (isString, isInteger,
+    createPaddingField, createNullField, createRawField,
+    writeIntoFile, createOrphanField)
+
+# FieldSet classes
+from hachoir_core.field.fake_array import FakeArray
+from hachoir_core.field.basic_field_set import (BasicFieldSet,
+    ParserError, MatchError)
+from hachoir_core.field.generic_field_set import GenericFieldSet
+from hachoir_core.field.seekable_field_set import SeekableFieldSet, RootSeekableFieldSet
+from hachoir_core.field.field_set import FieldSet
+from hachoir_core.field.static_field_set import StaticFieldSet
+from hachoir_core.field.parser import Parser
+from hachoir_core.field.vector import GenericVector, UserVector
+
+# Complex types
+from hachoir_core.field.float import Float32, Float64, Float80
+from hachoir_core.field.timestamp import (GenericTimestamp,
+    TimestampUnix32, TimestampUnix64, TimestampMac32, TimestampUUID60, TimestampWin64,
+    DateTimeMSDOS32, TimeDateMSDOS32, TimedeltaWin64)
+
+# Special Field classes
+from hachoir_core.field.link import Link, Fragment
+
+available_types = (
+    Bit, Bits, RawBits,
+    Bytes, RawBytes,
+    SubFile,
+    Character,
+    Int8, Int16, Int24, Int32, Int64,
+    UInt8, UInt16, UInt24, UInt32, UInt64,
+    String, CString, UnixLine,
+    PascalString8, PascalString16, PascalString32,
+    Float32, Float64,
+    PaddingBits, PaddingBytes,
+    NullBits, NullBytes,
+    TimestampUnix32, TimestampMac32, TimestampWin64,
+    DateTimeMSDOS32, TimeDateMSDOS32,
+#    GenericInteger, GenericString,
+)
+
diff --git a/lib/hachoir_core/field/basic_field_set.py b/lib/hachoir_core/field/basic_field_set.py
new file mode 100644
index 0000000000000000000000000000000000000000..c044124b8185da4368b4c860b2db31ab3f91063a
--- /dev/null
+++ b/lib/hachoir_core/field/basic_field_set.py
@@ -0,0 +1,147 @@
+from hachoir_core.field import Field, FieldError
+from hachoir_core.stream import InputStream
+from hachoir_core.endian import BIG_ENDIAN, LITTLE_ENDIAN
+from hachoir_core.event_handler import EventHandler
+
+class ParserError(FieldError):
+    """
+    Error raised by a field set.
+
+    @see: L{FieldError}
+    """
+    pass
+
+class MatchError(FieldError):
+    """
+    Error raised by a field set when the stream content doesn't
+    match the file format.
+
+    @see: L{FieldError}
+    """
+    pass
+
+class BasicFieldSet(Field):
+    _event_handler = None
+    is_field_set = True
+    endian = None
+
+    def __init__(self, parent, name, stream, description, size):
+        # Sanity checks (preconditions)
+        assert not parent or issubclass(parent.__class__, BasicFieldSet)
+        assert issubclass(stream.__class__, InputStream)
+
+        # Set field set size
+        if size is None and self.static_size:
+            assert isinstance(self.static_size, (int, long))
+            size = self.static_size
+
+        # Set Field attributes
+        self._parent = parent
+        self._name = name
+        self._size = size
+        self._description = description
+        self.stream = stream
+        self._field_array_count = {}
+
+        # Set endian
+        if not self.endian:
+            assert parent and parent.endian
+            self.endian = parent.endian
+
+        if parent:
+            # This field set is one of the root leaves
+            self._address = parent.nextFieldAddress()
+            self.root = parent.root
+            assert id(self.stream) == id(parent.stream)
+        else:
+            # This field set is the root
+            self._address = 0
+            self.root = self
+            self._global_event_handler = None
+
+        # Sanity checks (post-conditions)
+        assert self.endian in (BIG_ENDIAN, LITTLE_ENDIAN)
+        if (self._size is not None) and (self._size <= 0):
+            raise ParserError("Invalid parser '%s' size: %s" % (self.path, self._size))
+
+    def reset(self):
+        self._field_array_count = {}
+
+    def createValue(self):
+        return None
+
+    def connectEvent(self, event_name, handler, local=True):
+        assert event_name in (
+            # Callback prototype: def f(field)
+            # Called when new value is already set
+            "field-value-changed",
+
+            # Callback prototype: def f(field)
+            # Called when field size is already set
+            "field-resized",
+
+            # A new field has been inserted in the field set
+            # Callback prototype: def f(index, new_field)
+            "field-inserted",
+
+            # Callback prototype: def f(old_field, new_field)
+            # Called when new field is already in field set
+            "field-replaced",
+
+            # Callback prototype: def f(field, new_value)
+            # Called to ask to set new value
+            "set-field-value"
+        ), "Event name %r is invalid" % event_name
+        if local:
+            if self._event_handler is None:
+                self._event_handler = EventHandler()
+            self._event_handler.connect(event_name, handler)
+        else:
+            if self.root._global_event_handler is None:
+                self.root._global_event_handler = EventHandler()
+            self.root._global_event_handler.connect(event_name, handler)
+
+    def raiseEvent(self, event_name, *args):
+        # Transfer event to local listeners
+        if self._event_handler is not None:
+            self._event_handler.raiseEvent(event_name, *args)
+
+        # Transfer event to global listeners
+        if self.root._global_event_handler is not None:
+            self.root._global_event_handler.raiseEvent(event_name, *args)
+
+    def setUniqueFieldName(self, field):
+        key = field._name[:-2]
+        try:
+            self._field_array_count[key] += 1
+        except KeyError:
+            self._field_array_count[key] = 0
+        field._name = key + "[%u]" % self._field_array_count[key]
+
+    def readFirstFields(self, number):
+        """
+        Read first number fields if they are not read yet.
+
+        Returns number of new added fields.
+        """
+        number = number - self.current_length
+        if 0 < number:
+            return self.readMoreFields(number)
+        else:
+            return 0
+
+    def createFields(self):
+        raise NotImplementedError()
+    def __iter__(self):
+        raise NotImplementedError()
+    def __len__(self):
+        raise NotImplementedError()
+    def getField(self, key, const=True):
+        raise NotImplementedError()
+    def nextFieldAddress(self):
+        raise NotImplementedError()
+    def getFieldIndex(self, field):
+        raise NotImplementedError()
+    def readMoreFields(self, number):
+        raise NotImplementedError()
+
diff --git a/lib/hachoir_core/field/bit_field.py b/lib/hachoir_core/field/bit_field.py
new file mode 100644
index 0000000000000000000000000000000000000000..8fae3c7c2685cce075ad9f26a3484249b5a06e4b
--- /dev/null
+++ b/lib/hachoir_core/field/bit_field.py
@@ -0,0 +1,68 @@
+"""
+Bit sized classes:
+- Bit: Single bit, value is False or True ;
+- Bits: Integer with a size in bits ;
+- RawBits: unknown content with a size in bits.
+"""
+
+from hachoir_core.field import Field
+from hachoir_core.i18n import _
+from hachoir_core import config
+
+class RawBits(Field):
+    """
+    Unknown content with a size in bits.
+    """
+    static_size = staticmethod(lambda *args, **kw: args[1])
+
+    def __init__(self, parent, name, size, description=None):
+        """
+        Constructor: see L{Field.__init__} for parameter description
+        """
+        Field.__init__(self, parent, name, size, description)
+
+    def hasValue(self):
+        return True
+
+    def createValue(self):
+        return self._parent.stream.readBits(
+            self.absolute_address, self._size, self._parent.endian)
+
+    def createDisplay(self):
+        if self._size < config.max_bit_length:
+            return unicode(self.value)
+        else:
+            return _("<%s size=%u>" %
+                (self.__class__.__name__, self._size))
+    createRawDisplay = createDisplay
+
+class Bits(RawBits):
+    """
+    Positive integer with a size in bits
+
+    @see: L{Bit}
+    @see: L{RawBits}
+    """
+    pass
+
+class Bit(RawBits):
+    """
+    Single bit: value can be False or True, and size is exactly one bit.
+
+    @see: L{Bits}
+    """
+    static_size = 1
+
+    def __init__(self, parent, name, description=None):
+        """
+        Constructor: see L{Field.__init__} for parameter description
+        """
+        RawBits.__init__(self, parent, name, 1, description=description)
+
+    def createValue(self):
+        return 1 == self._parent.stream.readBits(
+                self.absolute_address, 1, self._parent.endian)
+
+    def createRawDisplay(self):
+        return unicode(int(self.value))
+
diff --git a/lib/hachoir_core/field/byte_field.py b/lib/hachoir_core/field/byte_field.py
new file mode 100644
index 0000000000000000000000000000000000000000..16db18102c8aabb8b3d2aedfc2e0f065d25293fe
--- /dev/null
+++ b/lib/hachoir_core/field/byte_field.py
@@ -0,0 +1,73 @@
+"""
+Very basic field: raw content with a size in byte. Use this class for
+unknown content.
+"""
+
+from hachoir_core.field import Field, FieldError
+from hachoir_core.tools import makePrintable
+from hachoir_core.bits import str2hex
+from hachoir_core import config
+
+MAX_LENGTH = (2**64)
+
+class RawBytes(Field):
+    """
+    Byte vector of unknown content
+
+    @see: L{Bytes}
+    """
+    static_size = staticmethod(lambda *args, **kw: args[1]*8)
+
+    def __init__(self, parent, name, length, description="Raw data"):
+        assert issubclass(parent.__class__, Field)
+        if not(0 < length <= MAX_LENGTH):
+            raise FieldError("Invalid RawBytes length (%s)!" % length)
+        Field.__init__(self, parent, name, length*8, description)
+        self._display = None
+
+    def _createDisplay(self, human):
+        max_bytes = config.max_byte_length
+        if type(self._getValue) is type(lambda: None):
+            display = self.value[:max_bytes]
+        else:
+            if self._display is None:
+                address = self.absolute_address
+                length = min(self._size / 8, max_bytes)
+                self._display = self._parent.stream.readBytes(address, length)
+            display = self._display
+        truncated = (8 * len(display) < self._size)
+        if human:
+            if truncated:
+                display += "(...)"
+            return makePrintable(display, "latin-1", quote='"', to_unicode=True)
+        else:
+            display = str2hex(display, format=r"\x%02x")
+            if truncated:
+                return '"%s(...)"' % display
+            else:
+                return '"%s"' % display
+
+    def createDisplay(self):
+        return self._createDisplay(True)
+
+    def createRawDisplay(self):
+        return self._createDisplay(False)
+
+    def hasValue(self):
+        return True
+
+    def createValue(self):
+        assert (self._size % 8) == 0
+        if self._display:
+            self._display = None
+        return self._parent.stream.readBytes(
+            self.absolute_address, self._size / 8)
+
+class Bytes(RawBytes):
+    """
+    Byte vector: can be used for magic number or GUID/UUID for example.
+
+    @see: L{RawBytes}
+    """
+    pass
+
diff --git a/lib/hachoir_core/field/character.py b/lib/hachoir_core/field/character.py
new file mode 100644
index 0000000000000000000000000000000000000000..42bc4b1e7ca4f9560758a0c06ef2d90dab2cee65
--- /dev/null
+++ b/lib/hachoir_core/field/character.py
@@ -0,0 +1,27 @@
+"""
+Character field class: an 8-bit character
+"""
+
+from hachoir_core.field import Bits
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_core.tools import makePrintable
+
+class Character(Bits):
+    """
+    An 8-bit character using ASCII charset for display attribute.
+    """
+    static_size = 8
+
+    def __init__(self, parent, name, description=None):
+        Bits.__init__(self, parent, name, 8, description=description)
+
+    def createValue(self):
+        return chr(self._parent.stream.readBits(
+            self.absolute_address, 8, BIG_ENDIAN))
+
+    def createRawDisplay(self):
+        return unicode(Bits.createValue(self))
+
+    def createDisplay(self):
+        return makePrintable(self.value, "ASCII", quote="'", to_unicode=True)
+
diff --git a/lib/hachoir_core/field/enum.py b/lib/hachoir_core/field/enum.py
new file mode 100644
index 0000000000000000000000000000000000000000..cc04a29e909c95fe4e65bbfaf4eef21660ed1d99
--- /dev/null
+++ b/lib/hachoir_core/field/enum.py
@@ -0,0 +1,26 @@
+def Enum(field, enum, key_func=None):
+    """
+    Enum is an adapter to another field: it will just change its display
+    attribute. It uses a dictionary to associate a value to another.
+
+    key_func is an optional function with prototype "def func(key)->key"
+    which is called to transform key.
+    """
+    display = field.createDisplay
+    if key_func:
+        def createDisplay():
+            try:
+                key = key_func(field.value)
+                return enum[key]
+            except LookupError:
+                return display()
+    else:
+        def createDisplay():
+            try:
+                return enum[field.value]
+            except LookupError:
+                return display()
+    field.createDisplay = createDisplay
+    field.getEnum = lambda: enum
+    return field
+
diff --git a/lib/hachoir_core/field/fake_array.py b/lib/hachoir_core/field/fake_array.py
new file mode 100644
index 0000000000000000000000000000000000000000..5535cafebb30bba888617c78f1eef61138b9a31a
--- /dev/null
+++ b/lib/hachoir_core/field/fake_array.py
@@ -0,0 +1,81 @@
+import itertools
+from hachoir_core.field import MissingField
+
+class FakeArray:
+    """
+    Simulate an array for GenericFieldSet.array(): fieldset.array("item")[0] is
+    equivalent to fieldset.array("item[0]").
+
+    It's possible to iterate over the items using::
+
+        for element in fieldset.array("item"):
+            ...
+
+    And to get array size using len(fieldset.array("item")).
+    """
+    def __init__(self, fieldset, name):
+        pos = name.rfind("/")
+        if pos != -1:
+            self.fieldset = fieldset[name[:pos]]
+            self.name = name[pos+1:]
+        else:
+            self.fieldset = fieldset
+            self.name = name
+        self._format = "%s[%%u]" % self.name
+        self._cache = {}
+        self._known_size = False
+        self._max_index = -1
+
+    def __nonzero__(self):
+        "Is the array empty or not?"
+        if self._cache:
+            return True
+        else:
+            return (0 in self)
+
+    def __len__(self):
+        "Number of fields in the array"
+        total = self._max_index+1
+        if not self._known_size:
+            for index in itertools.count(total):
+                try:
+                    field = self[index]
+                    total += 1
+                except MissingField:
+                    break
+        return total
+
+    def __contains__(self, index):
+        try:
+            field = self[index]
+            return True
+        except MissingField:
+            return False
+
+    def __getitem__(self, index):
+        """
+        Get a field of the array. Returns a field, or raise MissingField
+        exception if the field doesn't exist.
+        """
+        try:
+            value = self._cache[index]
+        except KeyError:
+            try:
+                value = self.fieldset[self._format % index]
+            except MissingField:
+                self._known_size = True
+                raise
+            self._cache[index] = value
+            self._max_index = max(index, self._max_index)
+        return value
+
+    def __iter__(self):
+        """
+        Iterate in the fields in their index order: field[0], field[1], ...
+        """
+        for index in itertools.count(0):
+            try:
+                yield self[index]
+            except MissingField:
+                raise StopIteration()
+
diff --git a/lib/hachoir_core/field/field.py b/lib/hachoir_core/field/field.py
new file mode 100644
index 0000000000000000000000000000000000000000..e4c949af7348d6ec813f6a7bc6ac3d399198eed1
--- /dev/null
+++ b/lib/hachoir_core/field/field.py
@@ -0,0 +1,260 @@
+"""
+Parent of all (field) classes in Hachoir: Field.
+"""
+
+from hachoir_core.compatibility import reversed
+from hachoir_core.stream import InputFieldStream
+from hachoir_core.error import HachoirError, HACHOIR_ERRORS
+from hachoir_core.log import Logger
+from hachoir_core.i18n import _
+from hachoir_core.tools import makePrintable
+from weakref import ref as weakref_ref
+
+class FieldError(HachoirError):
+    """
+    Error raised by a L{Field}.
+
+    @see: L{HachoirError}
+    """
+    pass
+
+def joinPath(path, name):
+    if path != "/":
+        return "/".join((path, name))
+    else:
+        return "/%s" % name
+
+class MissingField(KeyError, FieldError):
+    def __init__(self, field, key):
+        KeyError.__init__(self)
+        self.field = field
+        self.key = key
+
+    def __str__(self):
+        return 'Can\'t get field "%s" from %s' % (self.key, self.field.path)
+
+    def __unicode__(self):
+        return u'Can\'t get field "%s" from %s' % (self.key, self.field.path)
+
+class Field(Logger):
+    # static_size can have different values: None (no static size), an
+    # integer (number of bits), or a function which returns an integer.
+    #
+    # This function receives exactly the same arguments as the constructor
+    # except the first one (one). Example of function:
+    #    static_size = staticmethod(lambda *args, **kw: args[1])
+    static_size = None
+
+    # Indicate if this field contains other fields (is a field set) or not
+    is_field_set = False
+
+    def __init__(self, parent, name, size=None, description=None):
+        """
+        Set default class attributes, set right address if None address is
+        given.
+
+        @param parent: Parent field of this field
+        @type parent: L{Field}|None
+        @param name: Name of the field, have to be unique in parent. If it ends
+            with "[]", end will be replaced with "[new_id]" (eg. "raw[]"
+            becomes "raw[0]", next will be "raw[1]", and then "raw[2]", etc.)
+        @type name: str
+        @param size: Size of the field in bit (can be None, so it
+            will be computed later)
+        @type size: int|None
+        @param address: Address in bit relative to the parent absolute address
+        @type address: int|None
+        @param description: Optional string description
+        @type description: str|None
+        """
+        assert issubclass(parent.__class__, Field)
+        assert (size is None) or (0 <= size)
+        self._parent = parent
+        self._name = name
+        self._address = parent.nextFieldAddress()
+        self._size = size
+        self._description = description
+
+    def _logger(self):
+        return self.path
+
+    def createDescription(self):
+        return ""
+    def _getDescription(self):
+        if self._description is None:
+            try:
+                self._description = self.createDescription()
+                if isinstance(self._description, str):
+                    self._description = makePrintable(
+                        self._description, "ISO-8859-1", to_unicode=True)
+            except HACHOIR_ERRORS, err:
+                self.error("Error getting description: " + unicode(err))
+                self._description = ""
+        return self._description
+    description = property(_getDescription,
+    doc="Description of the field (string)")
+
+    def __str__(self):
+        return self.display
+    def __unicode__(self):
+        return self.display
+    def __repr__(self):
+        return "<%s path=%r, address=%s, size=%s>" % (
+            self.__class__.__name__, self.path, self._address, self._size)
+
+    def hasValue(self):
+        return self._getValue() is not None
+    def createValue(self):
+        raise NotImplementedError()
+    def _getValue(self):
+        try:
+            value = self.createValue()
+        except HACHOIR_ERRORS, err:
+            self.error(_("Unable to create value: %s") % unicode(err))
+            value = None
+        self._getValue = lambda: value
+        return value
+    value = property(lambda self: self._getValue(), doc="Value of field")
+
+    def _getParent(self):
+        return self._parent
+    parent = property(_getParent, doc="Parent of this field")
+
+    def createDisplay(self):
+        return unicode(self.value)
+    def _getDisplay(self):
+        if not hasattr(self, "_Field__display"):
+            try:
+                self.__display = self.createDisplay()
+            except HACHOIR_ERRORS, err:
+                self.error("Unable to create display: %s" % err)
+                self.__display = u""
+        return self.__display
+    display = property(lambda self: self._getDisplay(),
+    doc="Short (unicode) string which represents field content")
+
+    def createRawDisplay(self):
+        value = self.value
+        if isinstance(value, str):
+            return makePrintable(value, "ASCII", to_unicode=True)
+        else:
+            return unicode(value)
+    def _getRawDisplay(self):
+        if not hasattr(self, "_Field__raw_display"):
+            try:
+                self.__raw_display = self.createRawDisplay()
+            except HACHOIR_ERRORS, err:
+                self.error("Unable to create raw display: %s" % err)
+                self.__raw_display = u""
+        return self.__raw_display
+    raw_display = property(lambda self: self._getRawDisplay(),
+    doc="(Unicode) string which represents raw field content")
+
+    def _getName(self):
+        return self._name
+    name = property(_getName,
+    doc="Field name (unique in its parent field set list)")
+
+    def _getIndex(self):
+        if not self._parent:
+            return None
+        return self._parent.getFieldIndex(self)
+    index = property(_getIndex)
+
+    def _getPath(self):
+        if not self._parent:
+            return '/'
+        names = []
+        field = self
+        while field:
+            names.append(field._name)
+            field = field._parent
+        names[-1] = ''
+        return '/'.join(reversed(names))
+    path = property(_getPath,
+    doc="Full path of the field starting at root field")
+
+    def _getAddress(self):
+        return self._address
+    address = property(_getAddress,
+    doc="Relative address in bit to parent address")
+
+    def _getAbsoluteAddress(self):
+        address = self._address
+        current = self._parent
+        while current:
+            address += current._address
+            current = current._parent
+        return address
+    absolute_address = property(_getAbsoluteAddress,
+    doc="Absolute address (from stream beginning) in bit")
+
+    def _getSize(self):
+        return self._size
+    size = property(_getSize, doc="Content size in bit")
+
+    def _getField(self, name, const):
+        if name.strip("."):
+            return None
+        field = self
+        for index in xrange(1, len(name)):
+            field = field._parent
+            if field is None:
+                break
+        return field
+
+    def getField(self, key, const=True):
+        if key:
+            if key[0] == "/":
+                if self._parent:
+                    current = self._parent.root
+                else:
+                    current = self
+                if len(key) == 1:
+                    return current
+                key = key[1:]
+            else:
+                current = self
+            for part in key.split("/"):
+                field = current._getField(part, const)
+                if field is None:
+                    raise MissingField(current, part)
+                current = field
+            return current
+        raise KeyError("Key must not be an empty string!")
+
+    def __getitem__(self, key):
+        return self.getField(key, False)
+
+    def __contains__(self, key):
+        try:
+            return self.getField(key, False) is not None
+        except FieldError:
+            return False
+
+    def _createInputStream(self, **args):
+        assert self._parent
+        return InputFieldStream(self, **args)
+    def getSubIStream(self):
+        if hasattr(self, "_sub_istream"):
+            stream = self._sub_istream()
+        else:
+            stream = None
+        if stream is None:
+            stream = self._createInputStream()
+            self._sub_istream = weakref_ref(stream)
+        return stream
+    def setSubIStream(self, createInputStream):
+        cis = self._createInputStream
+        self._createInputStream = lambda **args: createInputStream(cis, **args)
+
+    def __nonzero__(self):
+        """
+        Method called by code like "if field: (...)".
+        Always returns True
+        """
+        return True
+
+    def getFieldType(self):
+        return self.__class__.__name__
+
diff --git a/lib/hachoir_core/field/field_set.py b/lib/hachoir_core/field/field_set.py
new file mode 100644
index 0000000000000000000000000000000000000000..92b5192626df6c2636816f07653e208b600887f5
--- /dev/null
+++ b/lib/hachoir_core/field/field_set.py
@@ -0,0 +1,7 @@
+from hachoir_core.field import BasicFieldSet, GenericFieldSet
+
+class FieldSet(GenericFieldSet):
+    def __init__(self, parent, name, *args, **kw):
+        assert issubclass(parent.__class__, BasicFieldSet)
+        GenericFieldSet.__init__(self, parent, name, parent.stream, *args, **kw)
+
diff --git a/lib/hachoir_core/field/float.py b/lib/hachoir_core/field/float.py
new file mode 100644
index 0000000000000000000000000000000000000000..27f9756300d7c093e91dd4a5d14243eb3429e7a3
--- /dev/null
+++ b/lib/hachoir_core/field/float.py
@@ -0,0 +1,99 @@
+from hachoir_core.field import Bit, Bits, FieldSet
+from hachoir_core.endian import BIG_ENDIAN, LITTLE_ENDIAN
+import struct
+
+# Make sure that we use right struct types
+assert struct.calcsize("f") == 4
+assert struct.calcsize("d") == 8
+assert struct.unpack("<d", "\x1f\x85\xebQ\xb8\x1e\t@")[0] == 3.14
+assert struct.unpack(">d", "\xc0\0\0\0\0\0\0\0")[0] == -2.0
+
+class FloatMantissa(Bits):
+    def createValue(self):
+        value = Bits.createValue(self)
+        return 1 + float(value) / (2 ** self.size)
+
+    def createRawDisplay(self):
+        return unicode(Bits.createValue(self))
+
+class FloatExponent(Bits):
+    def __init__(self, parent, name, size):
+        Bits.__init__(self, parent, name, size)
+        self.bias = 2 ** (size-1) - 1
+
+    def createValue(self):
+        return Bits.createValue(self) - self.bias
+
+    def createRawDisplay(self):
+        return unicode(self.value + self.bias)
+
+def floatFactory(name, format, mantissa_bits, exponent_bits, doc):
+    size = 1 + mantissa_bits + exponent_bits
+
+    class Float(FieldSet):
+        static_size = size
+        __doc__ = doc
+
+        def __init__(self, parent, name, description=None):
+            assert parent.endian in (BIG_ENDIAN, LITTLE_ENDIAN)
+            FieldSet.__init__(self, parent, name, description, size)
+            if format:
+                if self._parent.endian == BIG_ENDIAN:
+                    self.struct_format = ">"+format
+                else:
+                    self.struct_format = "<"+format
+            else:
+                self.struct_format = None
+
+        def createValue(self):
+            """
+            Create float value: use struct.unpack() when it's possible
+            (32 and 64-bit float) or compute it with :
+               mantissa * (2.0 ** exponent)
+
+            This computation may raise an OverflowError.
+            """
+            if self.struct_format:
+                raw = self._parent.stream.readBytes(
+                    self.absolute_address, self._size//8)
+                try:
+                    return struct.unpack(self.struct_format, raw)[0]
+                except struct.error, err:
+                    raise ValueError("[%s] conversion error: %s" %
+                        (self.__class__.__name__, err))
+            else:
+                try:
+                    value = self["mantissa"].value * (2.0 ** float(self["exponent"].value))
+                    if self["negative"].value:
+                        return -(value)
+                    else:
+                        return value
+                except OverflowError:
+                    raise ValueError("[%s] floating point overflow" %
+                        self.__class__.__name__)
+
+        def createFields(self):
+            yield Bit(self, "negative")
+            yield FloatExponent(self, "exponent", exponent_bits)
+            if 64 <= mantissa_bits:
+                yield Bit(self, "one")
+                yield FloatMantissa(self, "mantissa", mantissa_bits-1)
+            else:
+                yield FloatMantissa(self, "mantissa", mantissa_bits)
+
+    cls = Float
+    cls.__name__ = name
+    return cls
+
+# 32-bit float (standard: IEEE 754/854)
+Float32 = floatFactory("Float32", "f", 23, 8,
+    "Floating point number: format IEEE 754 int 32 bit")
+
+# 64-bit float (standard: IEEE 754/854)
+Float64 = floatFactory("Float64", "d", 52, 11,
+    "Floating point number: format IEEE 754 in 64 bit")
+
+# 80-bit float (standard: IEEE 754/854)
+Float80 = floatFactory("Float80", None, 64, 15,
+    "Floating point number: format IEEE 754 in 80 bit")
+
diff --git a/lib/hachoir_core/field/generic_field_set.py b/lib/hachoir_core/field/generic_field_set.py
new file mode 100644
index 0000000000000000000000000000000000000000..a3b5eb7d59df16ba159c8764129f6d2242e1bb9d
--- /dev/null
+++ b/lib/hachoir_core/field/generic_field_set.py
@@ -0,0 +1,532 @@
+from hachoir_core.field import (MissingField, BasicFieldSet, Field, ParserError,
+    createRawField, createNullField, createPaddingField, FakeArray)
+from hachoir_core.dict import Dict, UniqKeyError
+from hachoir_core.error import HACHOIR_ERRORS
+from hachoir_core.tools import lowerBound
+import hachoir_core.config as config
+
+class GenericFieldSet(BasicFieldSet):
+    """
+    Ordered list of fields. Use operator [] to access fields using their
+    name (field names are unique in a field set, but not in the whole
+    document).
+
+    Class attributes:
+    - endian: Bytes order (L{BIG_ENDIAN} or L{LITTLE_ENDIAN}). Optional if the
+      field set has a parent ;
+    - static_size: (optional) Size of FieldSet in bits. This attribute should
+      be used in parsers of constant size.
+
+    Instance attributes/methods:
+    - _fields: Ordered dictionary of all fields, may be incomplete
+      because it is fed lazily when a field is requested ;
+    - stream: Input stream used to feed fields' value
+    - root: The root of all field sets ;
+    - __len__(): Number of fields, may need to create field set ;
+    - __getitem__(): Get a field by its name or its path.
+
+    And attributes inherited from Field class:
+    - parent: Parent field (may be None if it's the root) ;
+    - name: Field name (unique in parent field set) ;
+    - value: The field set ;
+    - address: Field address (in bits) relative to parent ;
+    - description: A string describing the content (can be None) ;
+    - size: Size of field set in bits, may need to create field set.
+
+    Event handling:
+    - "connectEvent": Connect a handler to an event ;
+    - "raiseEvent": Raise an event.
+
+    To implement a new field set, you need to:
+    - create a class which inherits from FieldSet ;
+    - write createFields() method using lines like:
+         yield Class(self, "name", ...) ;
+    - and maybe set endian and static_size class attributes.
+    """
+
+    # Number of bits consumed so far by the fields already created.
+    _current_size = 0
+
+    def __init__(self, parent, name, stream, description=None, size=None):
+        """
+        Constructor
+        @param parent: Parent field set, None for root parser
+        @param name: Name of the field, have to be unique in parent. If it ends
+            with "[]", end will be replaced with "[new_id]" (eg. "raw[]"
+            becomes "raw[0]", next will be "raw[1]", and then "raw[2]", etc.)
+        @type name: str
+        @param stream: Input stream from which data are read
+        @type stream: L{InputStream}
+        @param description: Optional string description
+        @type description: str|None
+        @param size: Size in bits. If it's None, size will be computed. You
+            can also set size with class attribute static_size
+        """
+        BasicFieldSet.__init__(self, parent, name, stream, description, size)
+        # Ordered mapping field name -> field, filled lazily.
+        self._fields = Dict()
+        # Generator producing fields on demand; None once parsing is done.
+        self._field_generator = self.createFields()
+        self._array_cache = {}
+        self.__is_feeding = False
+
+    def array(self, key):
+        """Return a cached array-like accessor over fields "key[0]", "key[1]", ..."""
+        try:
+            return self._array_cache[key]
+        except KeyError:
+            array = FakeArray(self, key)
+            self._array_cache[key] = array
+            return self._array_cache[key]
+
+    def reset(self):
+        """
+        Reset a field set:
+         * clear fields ;
+         * restart field generator ;
+         * set current size to zero ;
+         * clear field array count.
+
+        But keep: name, value, description and size.
+        """
+        BasicFieldSet.reset(self)
+        self._fields = Dict()
+        self._field_generator = self.createFields()
+        self._current_size = 0
+        self._array_cache = {}
+
+    def __str__(self):
+        return '<%s path=%s, current_size=%s, current length=%s>' % \
+            (self.__class__.__name__, self.path, self._current_size, len(self._fields))
+
+    def __len__(self):
+        """
+        Returns number of fields, may need to create all fields
+        if it's not done yet.
+        """
+        if self._field_generator is not None:
+            self._feedAll()
+        return len(self._fields)
+
+    def _getCurrentLength(self):
+        # Number of fields created so far (does not feed the generator).
+        return len(self._fields)
+    current_length = property(_getCurrentLength)
+
+    def _getSize(self):
+        if self._size is None:
+            self._feedAll()
+        return self._size
+    size = property(_getSize, doc="Size in bits, may create all fields to get size")
+
+    def _getCurrentSize(self):
+        assert not(self.done)
+        return self._current_size
+    current_size = property(_getCurrentSize)
+
+    # True when no room is left for even one more bit.
+    eof = property(lambda self: self._checkSize(self._current_size + 1, True) < 0)
+
+    def _checkSize(self, size, strict):
+        # Return how many bits remain after 'size' bits (relative to this
+        # field set) in the nearest ancestor with a known size; None when
+        # nothing bounds the size and strict is False.
+        field = self
+        while field._size is None:
+            if not field._parent:
+                # Unbounded root: rely on the stream size.
+                assert self.stream.size is None
+                if not strict:
+                    return None
+                if self.stream.sizeGe(size):
+                    return 0
+                break
+            # Translate 'size' into the parent's address space.
+            size += field._address
+            field = field._parent
+        return field._size - size
+
+    autofix = property(lambda self: self.root.autofix)
+
+    def _addField(self, field):
+        """
+        Add a field to the field set:
+        * add it into _fields
+        * update _current_size
+
+        May raise a StopIteration() on error
+        """
+        if not issubclass(field.__class__, Field):
+            raise ParserError("Field type (%s) is not a subclass of 'Field'!"
+                % field.__class__.__name__)
+        assert isinstance(field._name, str)
+        if field._name.endswith("[]"):
+            self.setUniqueFieldName(field)
+        if config.debug:
+            self.info("[+] DBG: _addField(%s)" % field.name)
+
+        # required for the msoffice parser
+        if field._address != self._current_size:
+            self.warning("Fix address of %s to %s (was %s)" %
+                (field.path, self._current_size, field._address))
+            field._address = self._current_size
+
+        ask_stop = False
+        # Compute field size and check that there is enough place for it
+        self.__is_feeding = True
+        try:
+            field_size = field.size
+        except HACHOIR_ERRORS, err:
+            if field.is_field_set and field.current_length and field.eof:
+                # Truncated sub-field-set at end of data: keep what was read.
+                self.warning("Error when getting size of '%s': %s" % (field.name, err))
+                field._stopFeeding()
+                ask_stop = True
+            else:
+                self.warning("Error when getting size of '%s': delete it" % field.name)
+                self.__is_feeding = False
+                raise
+        self.__is_feeding = False
+
+        # No more place?
+        dsize = self._checkSize(field._address + field.size, False)
+        if (dsize is not None and dsize < 0) or (field.is_field_set and field.size <= 0):
+            if self.autofix and self._current_size:
+                self._fixFieldSize(field, field.size + dsize)
+            else:
+                raise ParserError("Field %s is too large!" % field.path)
+
+        self._current_size += field.size
+        try:
+            self._fields.append(field._name, field)
+        except UniqKeyError, err:
+            # Name clash: retry with an auto-numbered name.
+            self.warning("Duplicate field name " + unicode(err))
+            field._name += "[]"
+            self.setUniqueFieldName(field)
+            self._fields.append(field._name, field)
+        if ask_stop:
+            raise StopIteration()
+
+    def _fixFieldSize(self, field, new_size):
+        if new_size > 0:
+            if field.is_field_set and 0 < field.size:
+                # Sub-field-set can be shrunk in place.
+                field._truncate(new_size)
+                return
+
+            # Don't add the field <=> delete item
+            if self._size is None:
+                self._size = self._current_size + new_size
+        self.warning("[Autofix] Delete '%s' (too large)" % field.path)
+        raise StopIteration()
+
+    def _getField(self, name, const):
+        field = Field._getField(self, name, const)
+        if field is None:
+            if name in self._fields:
+                field = self._fields[name]
+            elif self._field_generator is not None and not const:
+                field = self._feedUntil(name)
+        return field
+
+    def getField(self, key, const=True):
+        # Integer key: index in creation order; may feed the generator
+        # unless 'const' is set.
+        if isinstance(key, (int, long)):
+            if key < 0:
+                raise KeyError("Key must be positive!")
+            if not const:
+                self.readFirstFields(key+1)
+            if len(self._fields.values) <= key:
+                raise MissingField(self, key)
+            return self._fields.values[key]
+        return Field.getField(self, key, const)
+
+    def _truncate(self, size):
+        # Shrink the field set to 'size' bits, dropping or truncating
+        # trailing fields as needed.
+        assert size > 0
+        if size < self._current_size:
+            self._size = size
+            # Drop whole fields located past the new end.
+            while True:
+                field = self._fields.values[-1]
+                if field._address < size:
+                    break
+                del self._fields[-1]
+            self._current_size = field._address
+            size -= field._address
+            if size < field._size:
+                if field.is_field_set:
+                    field._truncate(size)
+                else:
+                    # Replace the partially-kept field with raw data.
+                    del self._fields[-1]
+                    field = createRawField(self, size, "raw[]")
+                    self._fields.append(field._name, field)
+            self._current_size = self._size
+        else:
+            assert size < self._size or self._size is None
+            self._size = size
+        if self._size == self._current_size:
+            # Nothing left to parse.
+            self._field_generator = None
+
+    def _deleteField(self, index):
+        field = self._fields.values[index]
+        size = field.size
+        self._current_size -= size
+        del self._fields[index]
+        return field
+
+    def _fixLastField(self):
+        """
+        Try to fix last field when we know current field set size.
+        Returns new added field if any, or None.
+        """
+        assert self._size is not None
+
+        # Stop parser
+        message = ["stop parser"]
+        self._field_generator = None
+
+        # If last field is too big, delete it
+        while self._size < self._current_size:
+            field = self._deleteField(len(self._fields)-1)
+            message.append("delete field %s" % field.path)
+        assert self._current_size <= self._size
+
+        # If field size current is smaller: add a raw field
+        size = self._size - self._current_size
+        if size:
+            field = createRawField(self, size, "raw[]")
+            message.append("add padding")
+            self._current_size += field.size
+            self._fields.append(field._name, field)
+        else:
+            field = None
+        message = ", ".join(message)
+        self.warning("[Autofix] Fix parser error: " + message)
+        assert self._current_size == self._size
+        return field
+
+    def _stopFeeding(self):
+        new_field = None
+        if self._size is None:
+            if self._parent:
+                self._size = self._current_size
+        elif self._size != self._current_size:
+            if self.autofix:
+                new_field = self._fixLastField()
+            else:
+                raise ParserError("Invalid parser \"%s\" size!" % self.path)
+        self._field_generator = None
+        return new_field
+
+    def _fixFeedError(self, exception):
+        """
+        Try to fix a feeding error. Returns False if error can't be fixed,
+        otherwise returns new field if any, or None.
+        """
+        if self._size is None or not self.autofix:
+            return False
+        self.warning(unicode(exception))
+        return self._fixLastField()
+
+    def _feedUntil(self, field_name):
+        """
+        Return the field if it was found, None else
+        """
+        if self.__is_feeding \
+        or (self._field_generator and self._field_generator.gi_running):
+            # Re-entrant call while the generator is running: give up.
+            self.warning("Unable to get %s (and generator is already running)"
+                % field_name)
+            return None
+        try:
+            while True:
+                field = self._field_generator.next()
+                self._addField(field)
+                if field.name == field_name:
+                    return field
+        except HACHOIR_ERRORS, err:
+            if self._fixFeedError(err) is False:
+                raise
+        except StopIteration:
+            self._stopFeeding()
+        return None
+
+    def readMoreFields(self, number):
+        """
+        Read more number fields, or do nothing if parsing is done.
+
+        Returns number of new added fields.
+        """
+        if self._field_generator is None:
+            return 0
+        oldlen = len(self._fields)
+        try:
+            for index in xrange(number):
+                self._addField( self._field_generator.next() )
+        except HACHOIR_ERRORS, err:
+            if self._fixFeedError(err) is False:
+                raise
+        except StopIteration:
+            self._stopFeeding()
+        return len(self._fields) - oldlen
+
+    def _feedAll(self):
+        if self._field_generator is None:
+            return
+        try:
+            while True:
+                field = self._field_generator.next()
+                self._addField(field)
+        except HACHOIR_ERRORS, err:
+            if self._fixFeedError(err) is False:
+                raise
+        except StopIteration:
+            self._stopFeeding()
+
+    def __iter__(self):
+        """
+        Create a generator to iterate on each field, may create new
+        fields when needed
+        """
+        try:
+            done = 0
+            while True:
+                if done == len(self._fields):
+                    if self._field_generator is None:
+                        break
+                    self._addField( self._field_generator.next() )
+                for field in self._fields.values[done:]:
+                    yield field
+                    done += 1
+        except HACHOIR_ERRORS, err:
+            # Autofix may yield a single repair field or a list of them.
+            field = self._fixFeedError(err)
+            if isinstance(field, Field):
+                yield field
+            elif hasattr(field, '__iter__'):
+                for f in field:
+                    yield f
+            elif field is False:
+                raise
+        except StopIteration:
+            field = self._stopFeeding()
+            if isinstance(field, Field):
+                yield field
+            elif hasattr(field, '__iter__'):
+                for f in field:
+                    yield f
+
+    def _isDone(self):
+        return (self._field_generator is None)
+    done = property(_isDone, doc="Boolean to know if parsing is done or not")
+
+    #
+    # FieldSet_SeekUtility
+    #
+    def seekBit(self, address, name="padding[]",
+    description=None, relative=True, null=False):
+        """
+        Create a field to seek to specified address,
+        or None if it's not needed.
+
+        May raise an (ParserError) exception if address is invalid.
+        """
+        if relative:
+            nbits = address - self._current_size
+        else:
+            nbits = address - (self.absolute_address + self._current_size)
+        if nbits < 0:
+            raise ParserError("Seek error, unable to go back!")
+        if 0 < nbits:
+            if null:
+                return createNullField(self, nbits, name, description)
+            else:
+                return createPaddingField(self, nbits, name, description)
+        else:
+            return None
+
+    def seekByte(self, address, name="padding[]", description=None, relative=True, null=False):
+        """
+        Same as seekBit(), but with address in byte.
+        """
+        return self.seekBit(address * 8, name, description, relative, null=null)
+
+    #
+    # RandomAccessFieldSet
+    #
+    def replaceField(self, name, new_fields):
+        # TODO: Check in self and not self.field
+        # Problem is that "generator is already executing"
+        if name not in self._fields:
+            raise ParserError("Unable to replace %s: field doesn't exist!" % name)
+        assert 1 <= len(new_fields)
+        old_field = self[name]
+        total_size = sum( (field.size for field in new_fields) )
+        if old_field.size != total_size:
+            raise ParserError("Unable to replace %s: "
+                "new field(s) hasn't same size (%u bits instead of %u bits)!"
+                % (name, total_size, old_field.size))
+        field = new_fields[0]
+        if field._name.endswith("[]"):
+            self.setUniqueFieldName(field)
+        field._address = old_field.address
+        if field.name != name and field.name in self._fields:
+            raise ParserError(
+                "Unable to replace %s: name \"%s\" is already used!"
+                % (name, field.name))
+        self._fields.replace(name, field.name, field)
+        self.raiseEvent("field-replaced", old_field, field)
+        if 1 < len(new_fields):
+            # Insert the remaining fields right after the first one.
+            index = self._fields.index(new_fields[0].name)+1
+            address = field.address + field.size
+            for field in new_fields[1:]:
+                if field._name.endswith("[]"):
+                    self.setUniqueFieldName(field)
+                field._address = address
+                if field.name in self._fields:
+                    raise ParserError(
+                        "Unable to replace %s: name \"%s\" is already used!"
+                        % (name, field.name))
+                self._fields.insert(index, field.name, field)
+                self.raiseEvent("field-inserted", index, field)
+                index += 1
+                address += field.size
+
+    def getFieldByAddress(self, address, feed=True):
+        """
+        Only search in existing fields
+        """
+        if feed and self._field_generator is not None:
+            self._feedAll()
+        if address < self._current_size:
+            i = lowerBound(self._fields.values, lambda x: x.address + x.size <= address)
+            if i is not None:
+                return self._fields.values[i]
+        return None
+
+    def writeFieldsIn(self, old_field, address, new_fields):
+        """
+        Can only write in existing fields (address < self._current_size)
+        """
+
+        # Check size
+        total_size = sum( field.size for field in new_fields )
+        if old_field.size < total_size:
+            raise ParserError( \
+                "Unable to write fields at address %s " \
+                "(too big)!" % (address))
+
+        # Need padding before?
+        replace = []
+        size = address - old_field.address
+        assert 0 <= size
+        if 0 < size:
+            padding = createPaddingField(self, size)
+            padding._address = old_field.address
+            replace.append(padding)
+
+        # Set fields address
+        for field in new_fields:
+            field._address = address
+            address += field.size
+            replace.append(field)
+
+        # Need padding after?
+        size = (old_field.address + old_field.size) - address
+        assert 0 <= size
+        if 0 < size:
+            padding = createPaddingField(self, size)
+            padding._address = address
+            replace.append(padding)
+
+        self.replaceField(old_field.name, replace)
+
+    def nextFieldAddress(self):
+        # Address (in bits) where the next field will be added.
+        return self._current_size
+
+    def getFieldIndex(self, field):
+        # Position of 'field' in creation order.
+        return self._fields.index(field._name)
+
diff --git a/lib/hachoir_core/field/helper.py b/lib/hachoir_core/field/helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..ba44f68e3d848de7a0d7f5eb22942e506d307e1e
--- /dev/null
+++ b/lib/hachoir_core/field/helper.py
@@ -0,0 +1,57 @@
+from hachoir_core.field import (FieldError,
+    RawBits, RawBytes,
+    PaddingBits, PaddingBytes,
+    NullBits, NullBytes,
+    GenericString, GenericInteger)
+from hachoir_core.stream import FileOutputStream
+
+def createRawField(parent, size, name="raw[]", description=None):
+    if size <= 0:
+        raise FieldError("Unable to create raw field of %s bits" % size)
+    if (size % 8) == 0:
+        return RawBytes(parent, name, size/8, description)
+    else:
+        return RawBits(parent, name, size, description)
+
+def createPaddingField(parent, nbits, name="padding[]", description=None):
+    if nbits <= 0:
+        raise FieldError("Unable to create padding of %s bits" % nbits)
+    if (nbits % 8) == 0:
+        return PaddingBytes(parent, name, nbits/8, description)
+    else:
+        return PaddingBits(parent, name, nbits, description)
+
+def createNullField(parent, nbits, name="padding[]", description=None):
+    if nbits <= 0:
+        raise FieldError("Unable to create null padding of %s bits" % nbits)
+    if (nbits % 8) == 0:
+        return NullBytes(parent, name, nbits/8, description)
+    else:
+        return NullBits(parent, name, nbits, description)
+
+def isString(field):
+    return issubclass(field.__class__, GenericString)
+
+def isInteger(field):
+    return issubclass(field.__class__, GenericInteger)
+
+def writeIntoFile(fieldset, filename):
+    output = FileOutputStream(filename)
+    fieldset.writeInto(output)
+
+def createOrphanField(fieldset, address, field_cls, *args, **kw):
+    """
+    Create an orphan field at specified address:
+      field_cls(fieldset, *args, **kw)
+
+    The field uses the fieldset properties but it isn't added to the
+    field set.
+    """
+    save_size = fieldset._current_size
+    try:
+        fieldset._current_size = address
+        field = field_cls(fieldset, *args, **kw)
+    finally:
+        fieldset._current_size = save_size
+    return field
+
diff --git a/lib/hachoir_core/field/integer.py b/lib/hachoir_core/field/integer.py
new file mode 100644
index 0000000000000000000000000000000000000000..763f1d2f0981d077b79990cd9380b00e23b0c5aa
--- /dev/null
+++ b/lib/hachoir_core/field/integer.py
@@ -0,0 +1,44 @@
+"""
+Integer field classes:
+- UInt8, UInt16, UInt24, UInt32, UInt64: unsigned integers of 8, 16, 24, 32, 64 bits ;
+- Int8, Int16, Int24, Int32, Int64: signed integers of 8, 16, 24, 32, 64 bits.
+"""
+
+from hachoir_core.field import Bits, FieldError
+
+class GenericInteger(Bits):
+    """
+    Generic integer class used to generate other classes.
+
+    @param signed: interpret the bits as a signed integer when True
+    @param size: width in bits, must be in 8..256 (FieldError otherwise)
+    """
+    def __init__(self, parent, name, signed, size, description=None):
+        if not (8 <= size <= 256):
+            raise FieldError("Invalid integer size (%s): have to be in 8..256" % size)
+        Bits.__init__(self, parent, name, size, description)
+        self.signed = signed
+
+    def createValue(self):
+        # Decode using the parent field set's endianness at this field's
+        # absolute position in the stream.
+        return self._parent.stream.readInteger(
+            self.absolute_address, self.signed, self._size, self._parent.endian)
+
+def integerFactory(name, is_signed, size, doc):
+    class Integer(GenericInteger):
+        __doc__ = doc
+        static_size = size
+        def __init__(self, parent, name, description=None):
+            GenericInteger.__init__(self, parent, name, is_signed, size, description)
+    cls = Integer
+    cls.__name__ = name
+    return cls
+
+# Unsigned integer field classes
+UInt8 = integerFactory("UInt8", False, 8, "Unsigned integer of 8 bits")
+UInt16 = integerFactory("UInt16", False, 16, "Unsigned integer of 16 bits")
+UInt24 = integerFactory("UInt24", False, 24, "Unsigned integer of 24 bits")
+UInt32 = integerFactory("UInt32", False, 32, "Unsigned integer of 32 bits")
+UInt64 = integerFactory("UInt64", False, 64, "Unsigned integer of 64 bits")
+
+# Signed integer field classes
+Int8 = integerFactory("Int8", True, 8, "Signed integer of 8 bits")
+Int16 = integerFactory("Int16", True, 16, "Signed integer of 16 bits")
+Int24 = integerFactory("Int24", True, 24, "Signed integer of 24 bits")
+Int32 = integerFactory("Int32", True, 32, "Signed integer of 32 bits")
+Int64 = integerFactory("Int64", True, 64, "Signed integer of 64 bits")
+
diff --git a/lib/hachoir_core/field/link.py b/lib/hachoir_core/field/link.py
new file mode 100644
index 0000000000000000000000000000000000000000..b331c3b880e7fdb9bd890fd08e5d5fa3b638b735
--- /dev/null
+++ b/lib/hachoir_core/field/link.py
@@ -0,0 +1,109 @@
+from hachoir_core.field import Field, FieldSet, ParserError, Bytes, MissingField
+from hachoir_core.stream import FragmentedStream
+
+
+class Link(Field):
+    """
+    Zero-bit field pointing at another field of the parent field set.
+    Its value is the target field; its display is the target's path.
+    """
+    def __init__(self, parent, name, *args, **kw):
+        # size=0: a link consumes no data from the stream.
+        Field.__init__(self, parent, name, 0, *args, **kw)
+
+    def hasValue(self):
+        return True
+
+    def createValue(self):
+        # Resolve the target by looking up this link's display string
+        # (a field path) in the parent field set.
+        return self._parent[self.display]
+
+    def createDisplay(self):
+        value = self.value
+        if value is None:
+            # Broken link: show a placeholder instead of a path.
+            return "<%s>" % MissingField.__name__
+        return value.path
+
+    def _getField(self, name, const):
+        # Delegate child-field lookups to the link target.
+        target = self.value
+        assert self != target
+        return target._getField(name, const)
+
+
+class Fragments:
+    """
+    Iterable over a chain of Fragment objects, starting at *first* and
+    following each fragment's 'next' link. Yields the size of each
+    fragment's data (or None when getData() failed).
+    """
+    def __init__(self, first):
+        self.first = first
+
+    def __iter__(self):
+        fragment = self.first
+        while fragment is not None:
+            data = fragment.getData()
+            # 'data and data.size': None when getData() returned None,
+            # else the data field's size in bits.
+            yield data and data.size
+            fragment = fragment.next
+
+
+class Fragment(FieldSet):
+    """
+    Field set representing one piece of a fragmented payload. Fragments
+    are chained with 'first'/'next' links (set via setLinks()); the head
+    of the chain exposes the reassembled content through
+    _createInputStream().
+    """
+    # Head of the fragment chain; None until setLinks() is called.
+    _first = None
+
+    def __init__(self, *args, **kw):
+        FieldSet.__init__(self, *args, **kw)
+        # Wrap the field generator so link fields are injected lazily.
+        self._field_generator = self._createFields(self._field_generator)
+        if self.__class__.createFields == Fragment.createFields:
+            # Subclass did not override createFields(): the fragment's own
+            # "data" field is the payload.
+            self._getData = lambda: self
+
+    def getData(self):
+        """Return the field holding this fragment's payload, or None on error."""
+        try:
+            return self._getData()
+        except MissingField, e:
+            self.error(str(e))
+        return None
+
+    def setLinks(self, first, next=None):
+        # Record chain links; _feedLinks becomes a no-op afterwards.
+        self._first = first or self
+        self._next = next
+        self._feedLinks = lambda: self
+        return self
+
+    def _feedLinks(self):
+        # Parse fields until createFields() has called setLinks().
+        while self._first is None and self.readMoreFields(1):
+            pass
+        if self._first is None:
+            raise ParserError("first is None")
+        return self
+    first = property(lambda self: self._feedLinks()._first)
+
+    def _getNext(self):
+        next = self._feedLinks()._next
+        if callable(next):
+            # The next link may be supplied lazily as a callable.
+            self._next = next = next()
+        return next
+    next  = property(_getNext)
+
+    def _createInputStream(self, **args):
+        first = self.first
+        if first is self and hasattr(first, "_getData"):
+            # Head of the chain: expose the reassembled payload as a stream.
+            return FragmentedStream(first, packets=Fragments(first), **args)
+        return FieldSet._createInputStream(self, **args)
+
+    def _createFields(self, field_generator):
+        # Pass fields through until the links are known, then emit the
+        # 'first'/'next' link fields before the remaining fields.
+        if self._first is None:
+            for field in field_generator:
+                if self._first is not None:
+                    break
+                yield field
+            else:
+                raise ParserError("Fragment.setLinks not called")
+        else:
+            field = None
+        if self._first is not self:
+            link = Link(self, "first", None)
+            link._getValue = lambda: self._first
+            yield link
+        if self._next:
+            link = Link(self, "next", None)
+            link.createValue = self._getNext
+            yield link
+        if field:
+            # Re-emit the field that was pending when the links appeared.
+            yield field
+        for field in field_generator:
+            yield field
+
+    def createFields(self):
+        # Default payload: one raw Bytes field covering the whole fragment.
+        if self._size is None:
+            self._size = self._getSize()
+        yield Bytes(self, "data", self._size/8)
+
diff --git a/lib/hachoir_core/field/new_seekable_field_set.py b/lib/hachoir_core/field/new_seekable_field_set.py
new file mode 100644
index 0000000000000000000000000000000000000000..d145ab961dfd57cf9c30455ea23f8f5a0f6493ec
--- /dev/null
+++ b/lib/hachoir_core/field/new_seekable_field_set.py
@@ -0,0 +1,82 @@
+from hachoir_core.field import BasicFieldSet, GenericFieldSet, ParserError, createRawField
+from hachoir_core.error import HACHOIR_ERRORS
+
+# getgaps(int, int, [listof (int, int)]) -> generator of (int, int)
+# Gets all the gaps not covered by a block in `blocks` from `start` for `length` units.
+def getgaps(start, length, blocks):
+    '''
+    Example:
+    >>> list(getgaps(0, 20, [(15,3), (6,2), (6,2), (1,2), (2,3), (11,2), (9,5)]))
+    [(0, 1), (5, 1), (8, 1), (14, 1), (18, 2)]
+    '''
+    # done this way to avoid mutating the original
+    blocks = sorted(blocks, key=lambda b: b[0])
+    end = start+length
+    for s, l in blocks:
+        if s > start:
+            yield (start, s-start)
+            start = s
+        if s+l > start:
+            start = s+l
+    if start < end:
+        yield (start, end-start)
+
+class NewRootSeekableFieldSet(GenericFieldSet):
+    """
+    Root field set with random access: seekBit() moves the current
+    position directly (no padding field is created), and unparsed gaps
+    are filled with raw fields when feeding stops.
+    """
+    def seekBit(self, address, relative=True):
+        """Move the current position to *address* bits; returns None.
+        Raises ParserError when seeking before the field set start."""
+        if not relative:
+            address -= self.absolute_address
+        if address < 0:
+            raise ParserError("Seek below field set start (%s.%s)" % divmod(address, 8))
+        self._current_size = address
+        return None
+
+    def seekByte(self, address, relative=True):
+        """Same as seekBit(), but with address in bytes."""
+        return self.seekBit(address*8, relative)
+
+    def _fixLastField(self):
+        """
+        Try to fix last field when we know current field set size.
+        Returns the list of raw fields added to cover unparsed gaps
+        (empty when the set was already fully covered).
+        """
+        assert self._size is not None
+
+        # Stop parser
+        message = ["stop parser"]
+        self._field_generator = None
+
+        # If last field is too big, delete it
+        while self._size < self._current_size:
+            field = self._deleteField(len(self._fields)-1)
+            message.append("delete field %s" % field.path)
+        assert self._current_size <= self._size
+
+        # Cover every gap left by out-of-order seeks with a raw field.
+        blocks = [(x.absolute_address, x.size) for x in self._fields]
+        fields = []
+        for start, length in getgaps(self.absolute_address, self._size, blocks):
+            self.seekBit(start, relative=False)
+            field = createRawField(self, length, "unparsed[]")
+            self.setUniqueFieldName(field)
+            self._fields.append(field.name, field)
+            fields.append(field)
+            message.append("found unparsed segment: start %s, length %s" % (start, length))
+
+        self.seekBit(self._size, relative=False)
+        message = ", ".join(message)
+        if fields:
+            self.warning("[Autofix] Fix parser error: " + message)
+        return fields
+
+    def _stopFeeding(self):
+        new_field = None
+        if self._size is None:
+            if self._parent:
+                self._size = self._current_size
+
+        # NOTE(review): unlike GenericFieldSet._stopFeeding, _fixLastField()
+        # is called unconditionally and asserts self._size is not None; a
+        # parentless set with unknown size would trip that assert -- confirm
+        # roots always have a size at this point.
+        new_field = self._fixLastField()
+        self._field_generator = None
+        return new_field
+
+class NewSeekableFieldSet(NewRootSeekableFieldSet):
+    def __init__(self, parent, name, description=None, size=None):
+        assert issubclass(parent.__class__, BasicFieldSet)
+        NewRootSeekableFieldSet.__init__(self, parent, name, parent.stream, description, size)
diff --git a/lib/hachoir_core/field/padding.py b/lib/hachoir_core/field/padding.py
new file mode 100644
index 0000000000000000000000000000000000000000..c1c4b8c07a62b1dcf5e32fa520ddb28f0c0e45a2
--- /dev/null
+++ b/lib/hachoir_core/field/padding.py
@@ -0,0 +1,138 @@
+from hachoir_core.field import Bits, Bytes
+from hachoir_core.tools import makePrintable, humanFilesize
+from hachoir_core import config
+
class PaddingBits(Bits):
    """
    Padding bits, typically used to align the address of the next field.
    See also NullBits and PaddingBytes.

    Arguments:
     * nbits: size of the field in bits

    Optional arguments:
     * pattern (int): expected content, eg. 0 when every bit is zero
    """
    MAX_SIZE = 128

    @staticmethod
    def static_size(*args, **kw):
        # args[1] is the 'nbits' constructor argument (size in bits).
        return args[1]

    def __init__(self, parent, name, nbits, description="Padding", pattern=None):
        Bits.__init__(self, parent, name, nbits, description)
        self.pattern = pattern
        self._display_pattern = self.checkPattern()

    def checkPattern(self):
        """
        Verify the padding contents against the expected pattern.
        Returns True only when the check could run and succeeded.
        """
        # Checking may be disabled globally; only the all-zero pattern
        # is verifiable.
        if not config.check_padding_pattern or self.pattern != 0:
            return False
        oversized = self.MAX_SIZE < self._size
        if oversized:
            # Too large: only inspect the first MAX_SIZE bits.
            value = self._parent.stream.readBits(
                self.absolute_address, self.MAX_SIZE, self._parent.endian)
        else:
            value = self.value
        if value != 0:
            self.warning("padding contents doesn't look normal (invalid pattern)")
            return False
        if oversized:
            self.info("only check first %u bits" % self.MAX_SIZE)
        return True

    def createDisplay(self):
        if not self._display_pattern:
            return Bits.createDisplay(self)
        return u"<padding pattern=%s>" % self.pattern
+
class PaddingBytes(Bytes):
    """
    Padding bytes, typically used to align the address of the next field.
    See also NullBytes and PaddingBits.

    Arguments:
     * nbytes: size of the field in bytes

    Optional arguments:
     * pattern (str): repeated content pattern, eg. "\0" for nul bytes
    """
    MAX_SIZE = 4096

    @staticmethod
    def static_size(*args, **kw):
        # args[1] is nbytes; the size is expressed in bits.
        return args[1]*8

    def __init__(self, parent, name, nbytes,
                 description="Padding", pattern=None):
        """pattern is None or a repeated byte string."""
        assert (pattern is None) or (isinstance(pattern, str))
        Bytes.__init__(self, parent, name, nbytes, description)
        self.pattern = pattern
        self._display_pattern = self.checkPattern()

    def checkPattern(self):
        """
        Verify the padding contents against the repeated pattern.
        Returns True only when the check could run and succeeded.
        """
        if not config.check_padding_pattern or self.pattern is None:
            return False
        if self.MAX_SIZE < self._size//8:
            # Too large: only inspect the first MAX_SIZE bytes.
            self.info("only check first %s of padding" % humanFilesize(self.MAX_SIZE))
            content = self._parent.stream.readBytes(
                self.absolute_address, self.MAX_SIZE)
        else:
            content = self.value
        pattern = self.pattern
        pattern_len = len(pattern)
        offset = 0
        while offset < len(content):
            if content[offset:offset+pattern_len] != pattern:
                self.warning(
                    "padding contents doesn't look normal"
                    " (invalid pattern at byte %u)!"
                    % offset)
                return False
            offset += pattern_len
        return True

    def createDisplay(self):
        if not self._display_pattern:
            return Bytes.createDisplay(self)
        return u"<padding pattern=%s>" % makePrintable(self.pattern, "ASCII", quote="'")

    def createRawDisplay(self):
        # Raw display never shows the pattern summary.
        return Bytes.createDisplay(self)
+
class NullBits(PaddingBits):
    """
    Null (all-zero) padding bits, typically used to align the address of
    the next field.  See also PaddingBits and NullBytes.

    Arguments:
     * nbits: size of the field in bits
    """

    def __init__(self, parent, name, nbits, description=None):
        # A NullBits field is simply PaddingBits with a fixed zero pattern.
        PaddingBits.__init__(self, parent, name, nbits, description, pattern=0)

    def createDisplay(self):
        if not self._display_pattern:
            return Bits.createDisplay(self)
        return "<null>"
+
class NullBytes(PaddingBytes):
    """
    Null (all-zero) padding bytes, typically used to align the address of
    the next field.  See also PaddingBytes and NullBits.

    Arguments:
     * nbytes: size of the field in bytes
    """

    def __init__(self, parent, name, nbytes, description=None):
        # A NullBytes field is simply PaddingBytes with a nul-byte pattern.
        PaddingBytes.__init__(self, parent, name, nbytes, description, pattern="\0")

    def createDisplay(self):
        if not self._display_pattern:
            return Bytes.createDisplay(self)
        return "<null>"
+
diff --git a/lib/hachoir_core/field/parser.py b/lib/hachoir_core/field/parser.py
new file mode 100644
index 0000000000000000000000000000000000000000..e294e0246576d56ecbdf96d808c82393d6feeccc
--- /dev/null
+++ b/lib/hachoir_core/field/parser.py
@@ -0,0 +1,40 @@
+from hachoir_core.endian import BIG_ENDIAN, LITTLE_ENDIAN
+from hachoir_core.field import GenericFieldSet
+from hachoir_core.log import Logger
+import hachoir_core.config as config
+
class Parser(GenericFieldSet):
    """
    Root of a field tree.  Compared to a plain field set, a parser:
    - declares its byte order through the ``endian`` class attribute
      (L{BIG_ENDIAN} or L{LITTLE_ENDIAN});
    - reads from an input data stream (``stream`` attribute, set in
      L{__init__()});
    - takes its size from the input stream.
    """

    def __init__(self, stream, description=None):
        """
        Parser constructor.

        @param stream: data input stream (see L{InputStream})
        @param description: (optional) string description
        """
        # The concrete parser class must declare its byte order up front.
        assert hasattr(self, "endian") \
            and self.endian in (BIG_ENDIAN, LITTLE_ENDIAN)
        GenericFieldSet.__init__(self, None, "root", stream, description,
            stream.askSize(self))

    def _logger(self):
        return Logger._logger(self)

    def _setSize(self, size):
        self._truncate(size)
        self.raiseEvent("field-resized", self)

    @property
    def size(self):
        """Size in bits"""
        return self._size

    @property
    def path(self):
        # The parser is always the root of the tree.
        return "/"

    # dummy definition to prevent hachoir-core from depending on hachoir-parser
    @property
    def autofix(self):
        return config.autofix
diff --git a/lib/hachoir_core/field/seekable_field_set.py b/lib/hachoir_core/field/seekable_field_set.py
new file mode 100644
index 0000000000000000000000000000000000000000..9bc3fbbc131e0262e4d11c3a2fc180e64af9b2ff
--- /dev/null
+++ b/lib/hachoir_core/field/seekable_field_set.py
@@ -0,0 +1,182 @@
+from hachoir_core.field import Field, BasicFieldSet, FakeArray, MissingField, ParserError
+from hachoir_core.tools import makeUnicode
+from hachoir_core.error import HACHOIR_ERRORS
+from itertools import repeat
+import hachoir_core.config as config
+
class RootSeekableFieldSet(BasicFieldSet):
    """
    Field set supporting random access: createFields() may call seekBit()
    or seekByte() to move the read cursor before yielding a field.
    Fields are created lazily from the createFields() generator and are
    indexed both by name (_field_dict) and by creation order
    (_field_array).
    """
    def __init__(self, parent, name, stream, description, size):
        BasicFieldSet.__init__(self, parent, name, stream, description, size)
        self._generator = self.createFields()
        self._offset = 0            # read cursor, in bits, relative to this set
        self._current_size = 0      # sum of sizes of parsed fields, in bits
        if size:
            self._current_max_size = size
        else:
            self._current_max_size = 0
        self._field_dict = {}       # field name -> field
        self._field_array = []      # fields in creation order

    def _feedOne(self):
        """Create the next field from the generator and register it."""
        assert self._generator
        field = self._generator.next()
        self._addField(field)
        return field

    def array(self, key):
        # Lazy view over the fields named "key[0]", "key[1]", ...
        # (presumably — see FakeArray for the exact semantics).
        return FakeArray(self, key)

    def getFieldByAddress(self, address, feed=True):
        """
        Return the field containing the given bit address (relative to
        this set), or None.  Parses more fields on demand.
        """
        # NOTE(review): the 'feed' argument is currently ignored; more
        # fields are always parsed when the address is not found.
        for field in self._field_array:
            if field.address <= address < field.address + field.size:
                return field
        for field in self._readFields():
            if field.address <= address < field.address + field.size:
                return field
        return None

    def _stopFeed(self):
        # Freeze the size at the highest address reached by any field.
        self._size = self._current_max_size
        self._generator = None
    done = property(lambda self: not bool(self._generator))

    def _getSize(self):
        if self._size is None:
            # The size is only known once every field has been parsed.
            self._feedAll()
        return self._size
    size = property(_getSize)

    def _getField(self, key, const):
        """Find a field by name; feed the generator until found unless 'const'."""
        field = Field._getField(self, key, const)
        if field is not None:
            return field
        if key in self._field_dict:
            return self._field_dict[key]
        if self._generator and not const:
            try:
                while True:
                    field = self._feedOne()
                    if field.name == key:
                        return field
            except StopIteration:
                self._stopFeed()
            except HACHOIR_ERRORS, err:
                self.error("Error: %s" % makeUnicode(err))
                self._stopFeed()
        return None

    def getField(self, key, const=True):
        # An integer key is an index in creation order; other keys are
        # delegated to the name/path lookup of Field.getField().
        if isinstance(key, (int, long)):
            if key < 0:
                raise KeyError("Key must be positive!")
            if not const:
                self.readFirstFields(key+1)
            if len(self._field_array) <= key:
                raise MissingField(self, key)
            return self._field_array[key]
        return Field.getField(self, key, const)

    def _addField(self, field):
        """Register a freshly created field at the current cursor position."""
        if field._name.endswith("[]"):
            self.setUniqueFieldName(field)
        if config.debug:
            self.info("[+] DBG: _addField(%s)" % field.name)

        # Fields are always laid out at the cursor: fix the address if the
        # generator created the field with a different one.
        if field._address != self._offset:
            self.warning("Set field %s address to %s (was %s)" % (
                field.path, self._offset//8, field._address//8))
            field._address = self._offset
        assert field.name not in self._field_dict

        self._checkFieldSize(field)

        self._field_dict[field.name] = field
        self._field_array.append(field)
        self._current_size += field.size
        self._offset += field.size
        self._current_max_size = max(self._current_max_size, field.address + field.size)

    def _checkAddress(self, address):
        """Is the given (relative) bit address within the set's bounds?"""
        if self._size is not None:
            max_addr = self._size
        else:
            # FIXME: Use parent size
            max_addr = self.stream.size
        return address < max_addr

    def _checkFieldSize(self, field):
        # Reject a field whose last bit would fall outside the set.
        size = field.size
        addr = field.address
        if not self._checkAddress(addr+size-1):
            raise ParserError("Unable to add %s: field is too large" % field.name)

    def seekBit(self, address, relative=True):
        """Move the read cursor to the given bit address."""
        if not relative:
            address -= self.absolute_address
        if address < 0:
            raise ParserError("Seek below field set start (%s.%s)" % divmod(address, 8))
        if not self._checkAddress(address):
            raise ParserError("Seek above field set end (%s.%s)" % divmod(address, 8))
        self._offset = address
        return None

    def seekByte(self, address, relative=True):
        """Move the read cursor to the given byte address."""
        return self.seekBit(address*8, relative)

    def readMoreFields(self, number):
        """Parse up to 'number' more fields; return how many were added."""
        return self._readMoreFields(xrange(number))

    def _feedAll(self):
        """Parse fields until the generator is exhausted."""
        return self._readMoreFields(repeat(1))

    def _readFields(self):
        """Generator yielding each newly parsed field, one at a time."""
        while True:
            added = self._readMoreFields(xrange(1))
            if not added:
                break
            yield self._field_array[-1]

    def _readMoreFields(self, index_generator):
        # Parse one field per item of index_generator.  Parser errors stop
        # the feed but are logged rather than propagated.
        added = 0
        if self._generator:
            try:
                for index in index_generator:
                    self._feedOne()
                    added += 1
            except StopIteration:
                self._stopFeed()
            except HACHOIR_ERRORS, err:
                self.error("Error: %s" % makeUnicode(err))
                self._stopFeed()
        return added

    # Number of fields parsed so far / current cursor position in bits.
    current_length = property(lambda self: len(self._field_array))
    current_size = property(lambda self: self._offset)

    def __iter__(self):
        # Yield already-parsed fields first, then keep feeding.
        for field in self._field_array:
            yield field
        if self._generator:
            try:
                while True:
                    yield self._feedOne()
            except StopIteration:
                self._stopFeed()
                raise StopIteration

    def __len__(self):
        if self._generator:
            self._feedAll()
        return len(self._field_array)

    def nextFieldAddress(self):
        """Relative bit address where the next field will be placed."""
        return self._offset

    def getFieldIndex(self, field):
        """Creation-order index of the given field."""
        return self._field_array.index(field)
+
class SeekableFieldSet(RootSeekableFieldSet):
    """
    Seekable field set nested inside another field set: the input stream
    is taken from the parent instead of being passed explicitly.
    """
    def __init__(self, parent, name, description=None, size=None):
        # The parent is mandatory and must itself be a field set
        # (only the root variant takes an explicit stream).
        assert issubclass(parent.__class__, BasicFieldSet)
        RootSeekableFieldSet.__init__(self, parent, name, parent.stream, description, size)
+
diff --git a/lib/hachoir_core/field/static_field_set.py b/lib/hachoir_core/field/static_field_set.py
new file mode 100644
index 0000000000000000000000000000000000000000..afe6cdb07967e6036f43b8b2e24c75eae2eecf5f
--- /dev/null
+++ b/lib/hachoir_core/field/static_field_set.py
@@ -0,0 +1,54 @@
+from hachoir_core.field import FieldSet, ParserError
+
+class StaticFieldSet(FieldSet):
+    """
+    Static field set: format class attribute is a tuple of all fields
+    in syntax like:
+       format = (
+          (TYPE1, ARG1, ARG2, ...),
+          (TYPE2, ARG1, ARG2, ..., {KEY1=VALUE1, ...}),
+          ...
+       )
+
+    Types with dynamic size are forbidden, eg. CString, PascalString8, etc.
+    """
+    format = None  # You have to redefine this class variable
+    _class = None
+
+    def __new__(cls, *args, **kw):
+        assert cls.format is not None, "Class attribute 'format' is not set"
+        if cls._class is not cls.__name__:
+            cls._class = cls.__name__
+            cls.static_size = cls._computeStaticSize()
+        return object.__new__(cls)
+
+    @staticmethod
+    def _computeItemSize(item):
+        item_class = item[0]
+        if item_class.static_size is None:
+            raise ParserError("Unable to get static size of field type: %s"
+                % item_class.__name__)
+        if callable(item_class.static_size):
+            if isinstance(item[-1], dict):
+                return item_class.static_size(*item[1:-1], **item[-1])
+            else:
+                return item_class.static_size(*item[1:])
+        else:
+            assert isinstance(item_class.static_size, (int, long))
+            return item_class.static_size
+
+    def createFields(self):
+        for item in self.format:
+            if isinstance(item[-1], dict):
+                yield item[0](self, *item[1:-1], **item[-1])
+            else:
+                yield item[0](self, *item[1:])
+
+    @classmethod
+    def _computeStaticSize(cls, *args):
+        return sum(cls._computeItemSize(item) for item in cls.format)
+
+    # Initial value of static_size, it changes when first instance
+    # is created (see __new__)
+    static_size = _computeStaticSize
+
diff --git a/lib/hachoir_core/field/string_field.py b/lib/hachoir_core/field/string_field.py
new file mode 100644
index 0000000000000000000000000000000000000000..e44e24dc69053fe0f9409958cee98461911f3aeb
--- /dev/null
+++ b/lib/hachoir_core/field/string_field.py
@@ -0,0 +1,402 @@
+"""
+String field classes:
+- String: Fixed length string (no prefix/no suffix) ;
+- CString: String which ends with nul byte ("\0") ;
+- UnixLine: Unix line of text, string which ends with "\n" ;
+- PascalString8, PascalString16, PascalString32: String prefixed with
+  length written in a 8, 16, 32-bit integer (use parent endian).
+
+Constructor has optional arguments:
+- strip: value can be a string or True ;
+- charset: if set, convert string to unicode using this charset (in "replace"
+  mode which replace all buggy characters with ".").
+
+Note: For PascalStringXX, prefixed value is the number of bytes and not
+      of characters!
+"""
+
+from hachoir_core.field import FieldError, Bytes
+from hachoir_core.endian import LITTLE_ENDIAN, BIG_ENDIAN
+from hachoir_core.tools import alignValue, makePrintable
+from hachoir_core.i18n import guessBytesCharset, _
+from hachoir_core import config
+from codecs import BOM_UTF16_LE, BOM_UTF16_BE, BOM_UTF32_LE, BOM_UTF32_BE
+
+# Default charset used to convert byte string to Unicode
+# This charset is used if no charset is specified or on conversion error
+FALLBACK_CHARSET = "ISO-8859-1"
+
class GenericString(Bytes):
    """
    Generic string class, base of all the other string fields.

    The charset has to be in CHARSET_8BIT or in UTF_CHARSET; when it is
    None, the charset is guessed from the content on first access.
    """

    VALID_FORMATS = ("C", "UnixLine",
        "fixed", "Pascal8", "Pascal16", "Pascal32")

    # 8-bit charsets
    CHARSET_8BIT = set((
        "ASCII",          # ANSI X3.4-1968
        "MacRoman",
        "CP037",          # EBCDIC 037
        "CP874",          # Thai
        "WINDOWS-1250",   # Central Europe
        "WINDOWS-1251",   # Cyrillic
        "WINDOWS-1252",   # Latin I
        "WINDOWS-1253",   # Greek
        "WINDOWS-1254",   # Turkish
        "WINDOWS-1255",   # Hebrew
        "WINDOWS-1256",   # Arabic
        "WINDOWS-1257",   # Baltic
        "WINDOWS-1258",   # Vietnam
        "ISO-8859-1",     # Latin-1
        "ISO-8859-2",     # Latin-2
        "ISO-8859-3",     # Latin-3
        "ISO-8859-4",     # Latin-4
        "ISO-8859-5",
        "ISO-8859-6",
        "ISO-8859-7",
        "ISO-8859-8",
        "ISO-8859-9",     # Latin-5
        "ISO-8859-10",    # Latin-6
        "ISO-8859-11",    # Thai
        "ISO-8859-13",    # Latin-7
        "ISO-8859-14",    # Latin-8
        "ISO-8859-15",    # Latin-9 or ("Latin-0")
        "ISO-8859-16",    # Latin-10
    ))

    # UTF-xx charset family: name => (code unit size in bits, endian)
    UTF_CHARSET = {
        "UTF-8": (8, None),
        "UTF-16-LE": (16, LITTLE_ENDIAN),
        "UTF-32LE": (32, LITTLE_ENDIAN),
        "UTF-16-BE": (16, BIG_ENDIAN),
        "UTF-32BE": (32, BIG_ENDIAN),
        "UTF-16": (16, "BOM"),
        "UTF-32": (32, "BOM"),
    }

    # UTF-xx BOM => charset with endian
    UTF_BOM = {
        16: {BOM_UTF16_LE: "UTF-16-LE", BOM_UTF16_BE: "UTF-16-BE"},
        32: {BOM_UTF32_LE: "UTF-32LE", BOM_UTF32_BE: "UTF-32BE"},
    }

    # Suffix format: value is suffix (string)
    SUFFIX_FORMAT = {
        "C": {
             8: {LITTLE_ENDIAN: "\0",       BIG_ENDIAN: "\0"},
            16: {LITTLE_ENDIAN: "\0\0",     BIG_ENDIAN: "\0\0"},
            32: {LITTLE_ENDIAN: "\0\0\0\0", BIG_ENDIAN: "\0\0\0\0"},
        },
        "UnixLine": {
             8: {LITTLE_ENDIAN: "\n",       BIG_ENDIAN: "\n"},
            16: {LITTLE_ENDIAN: "\n\0",     BIG_ENDIAN: "\0\n"},
            32: {LITTLE_ENDIAN: "\n\0\0\0", BIG_ENDIAN: "\0\0\0\n"},
        },

    }

    # Pascal format: value is the size of the length prefix in bytes
    # (read as value*8 bits below)
    PASCAL_FORMATS = {
        "Pascal8":  1,
        "Pascal16": 2,
        "Pascal32": 4
    }

    # Raw value: with prefix and suffix, not stripped,
    # and not converted to Unicode
    _raw_value = None

    def __init__(self, parent, name, format, description=None,
    strip=None, charset=None, nbytes=None, truncate=None):
        """
        format: one of VALID_FORMATS; nbytes is only meaningful (and
        required) with the "fixed" format.
        strip: characters to strip, or True for whitespace.
        truncate: cut the value at the first occurrence of this string.
        """
        Bytes.__init__(self, parent, name, 1, description)

        # Is format valid?
        assert format in self.VALID_FORMATS

        # Store options
        self._format = format
        self._strip = strip
        self._truncate = truncate

        # Check charset and compute character size in bytes
        # (or None when it's not possible to guess character size)
        if not charset or charset in self.CHARSET_8BIT:
            self._character_size = 1   # one byte per character
        elif charset in self.UTF_CHARSET:
            self._character_size = None
        else:
            raise FieldError("Invalid charset for %s: \"%s\"" %
                (self.path, charset))
        self._charset = charset

        # Is it a fixed string?
        if nbytes is not None:
            assert self._format == "fixed"
            # Arbitrary limits, just to catch some bugs...
            if not (1 <= nbytes <= 0xffff):
                raise FieldError("Invalid string size for %s: %s" %
                    (self.path, nbytes))
            self._content_size = nbytes   # content length in bytes
            self._size = nbytes * 8
            self._content_offset = 0
        else:
            # Format with a suffix: Find the end of the string
            if self._format in self.SUFFIX_FORMAT:
                self._content_offset = 0

                # Choose the suffix
                suffix = self.suffix_str

                # Find the suffix
                length = self._parent.stream.searchBytesLength(
                    suffix, False, self.absolute_address)
                if length is None:
                    raise FieldError("Unable to find end of string %s (format %s)!"
                        % (self.path, self._format))
                if 1 < len(suffix):
                    # Fix length for little endian bug with UTF-xx charset:
                    #   u"abc" -> "a\0b\0c\0\0\0" (UTF-16-LE)
                    #   search returns length=5, whereas real length is 6
                    length = alignValue(length, len(suffix))

                # Compute sizes
                self._content_size = length # in bytes
                self._size = (length + len(suffix)) * 8

            # Format with a prefix: Read prefixed length in bytes
            else:
                assert self._format in self.PASCAL_FORMATS

                # Get the prefix size
                prefix_size = self.PASCAL_FORMATS[self._format]
                self._content_offset = prefix_size

                # Read the prefix and compute sizes
                value = self._parent.stream.readBits(
                    self.absolute_address, prefix_size*8, self._parent.endian)
                self._content_size = value   # in bytes
                self._size = (prefix_size + value) * 8

        # For UTF-16 and UTF-32, choose the right charset using BOM
        if self._charset in self.UTF_CHARSET:
            # Charset requires a BOM?
            bomsize, endian  = self.UTF_CHARSET[self._charset]
            if endian == "BOM":
                # Read the BOM value (bomsize is the code unit size in bits)
                nbytes = bomsize // 8
                bom = self._parent.stream.readBytes(self.absolute_address, nbytes)

                # Choose right charset using the BOM
                bom_endian = self.UTF_BOM[bomsize]
                if bom not in bom_endian:
                    raise FieldError("String %s has invalid BOM (%s)!"
                        % (self.path, repr(bom)))
                self._charset = bom_endian[bom]
                # The BOM is part of the field but not of the content
                self._content_size -= nbytes
                self._content_offset += nbytes

        # Compute length in character if possible
        if self._character_size:
            self._length = self._content_size //  self._character_size
        else:
            self._length = None

    @staticmethod
    def staticSuffixStr(format, charset, endian):
        """Terminator string for format/charset/endian ('' when the format has none)."""
        if format not in GenericString.SUFFIX_FORMAT:
            return ''
        suffix = GenericString.SUFFIX_FORMAT[format]
        if charset in GenericString.UTF_CHARSET:
            suffix_size = GenericString.UTF_CHARSET[charset][0]
            suffix = suffix[suffix_size]
        else:
            suffix = suffix[8]
        return suffix[endian]

    def _getSuffixStr(self):
        return self.staticSuffixStr(
            self._format, self._charset, self._parent.endian)
    suffix_str = property(_getSuffixStr)

    def _convertText(self, text):
        """Convert the byte string to Unicode, guessing the charset when unknown
        and falling back to FALLBACK_CHARSET on decoding errors."""
        if not self._charset:
            # charset is still unknown: guess the charset
            self._charset = guessBytesCharset(text, default=FALLBACK_CHARSET)

        # Try to convert to Unicode
        try:
            return unicode(text, self._charset, "strict")
        except UnicodeDecodeError, err:
            pass

        #--- Conversion error ---

        # Fix truncated UTF-16 string like 'B\0e' (3 bytes)
        # => Add missing nul byte: 'B\0e\0' (4 bytes)
        if err.reason == "truncated data" \
        and err.end == len(text) \
        and self._charset == "UTF-16-LE":
            try:
                text = unicode(text+"\0", self._charset, "strict")
                self.warning("Fix truncated %s string: add missing nul byte" % self._charset)
                return text
            except UnicodeDecodeError, err:
                pass

        # On error, use FALLBACK_CHARSET
        self.warning(u"Unable to convert string to Unicode: %s" % err)
        return unicode(text, FALLBACK_CHARSET, "strict")

    def _guessCharset(self):
        """Guess the content charset by inspecting the raw bytes."""
        addr = self.absolute_address + self._content_offset * 8
        bytes = self._parent.stream.readBytes(addr, self._content_size)
        return guessBytesCharset(bytes, default=FALLBACK_CHARSET)

    def createValue(self, human=True):
        """
        Read the string value.  With human=True (default): the Unicode
        content without prefix/suffix, truncated and stripped as
        configured.  With human=False: the raw byte string.
        """
        # Compute data address (in bits) and size (in bytes)
        if human:
            addr = self.absolute_address + self._content_offset * 8
            size = self._content_size
        else:
            addr = self.absolute_address
            size = self._size // 8
        if size == 0:
            # Empty string
            return u""

        # Read bytes in data stream
        text = self._parent.stream.readBytes(addr, size)

        # Don't transform data?
        if not human:
            return text

        # Convert text to Unicode
        text = self._convertText(text)

        # Truncate
        if self._truncate:
            pos = text.find(self._truncate)
            if 0 <= pos:
                text = text[:pos]

        # Strip string if needed
        if self._strip:
            if isinstance(self._strip, (str, unicode)):
                text = text.strip(self._strip)
            else:
                text = text.strip()
        assert isinstance(text, unicode)
        return text

    def createDisplay(self, human=True):
        """Printable, quoted and (if needed) length-limited representation."""
        if not human:
            if self._raw_value is None:
                self._raw_value = GenericString.createValue(self, False)
            value = makePrintable(self._raw_value, "ASCII", to_unicode=True)
        elif self._charset:
            value = makePrintable(self.value, "ISO-8859-1", to_unicode=True)
        else:
            value = self.value
        if config.max_string_length < len(value):
            # Truncate string if needed
            value = "%s(...)" % value[:config.max_string_length]
        if not self._charset or not human:
            return makePrintable(value, "ASCII", quote='"', to_unicode=True)
        else:
            if value:
                return '"%s"' % value.replace('"', '\\"')
            else:
                return _("(empty)")

    def createRawDisplay(self):
        return GenericString.createDisplay(self, human=False)

    def _getLength(self):
        # Computed lazily when the character size is unknown.
        if self._length is None:
            self._length = len(self.value)
        return self._length
    length = property(_getLength, doc="String length in characters")

    def _getFormat(self):
        return self._format
    format = property(_getFormat, doc="String format (eg. 'C')")

    def _getCharset(self):
        # Guess the charset on first access when it was not specified.
        if not self._charset:
            self._charset = self._guessCharset()
        return self._charset
    charset = property(_getCharset, doc="String charset (eg. 'ISO-8859-1')")

    def _getContentSize(self):
        return self._content_size
    content_size = property(_getContentSize, doc="Content size in bytes")

    def _getContentOffset(self):
        return self._content_offset
    content_offset = property(_getContentOffset, doc="Content offset in bytes")

    def getFieldType(self):
        """Type description including the charset and strip options."""
        info = self.charset
        if self._strip:
            if isinstance(self._strip, (str, unicode)):
                info += ",strip=%s" % makePrintable(self._strip, "ASCII", quote="'")
            else:
                info += ",strip=True"
        return "%s<%s>" % (Bytes.getFieldType(self), info)
+
def stringFactory(name, format, doc):
    """
    Build a GenericString subclass bound to a fixed string format.
    'name' becomes the class name and 'doc' its docstring.
    """
    class NewString(GenericString):
        __doc__ = doc

        def __init__(self, parent, name, description=None,
                strip=None, charset=None, truncate=None):
            GenericString.__init__(self, parent, name, format, description,
                strip=strip, charset=charset, truncate=truncate)

    NewString.__name__ = name
    return NewString
+
# Concrete string classes built with stringFactory().

# String which ends with nul byte ("\0")
CString = stringFactory("CString", "C",
    r"""C string: string ending with nul byte.
See GenericString to get more information.""")

# Unix line of text: string which ends with "\n" (ASCII 0x0A)
UnixLine = stringFactory("UnixLine", "UnixLine",
    r"""Unix line: string ending with "\n" (ASCII code 10).
See GenericString to get more information.""")

# String prefixed with length written in a 8-bit integer
PascalString8 = stringFactory("PascalString8", "Pascal8",
    r"""Pascal string: string prefixed with 8-bit integer containing its length (endian depends on parent endian).
See GenericString to get more information.""")

# String prefixed with length written in a 16-bit integer (use parent endian)
PascalString16 = stringFactory("PascalString16", "Pascal16",
    r"""Pascal string: string prefixed with 16-bit integer containing its length (endian depends on parent endian).
See GenericString to get more information.""")

# String prefixed with length written in a 32-bit integer (use parent endian)
PascalString32 = stringFactory("PascalString32", "Pascal32",
    r"""Pascal string: string prefixed with 32-bit integer containing its length (endian depends on parent endian).
See GenericString to get more information.""")
+
+
class String(GenericString):
    """
    String with a fixed size, given in bytes.
    See GenericString to get more information.
    """

    @staticmethod
    def static_size(*args, **kw):
        # args[1] is nbytes; the size is expressed in bits.
        return args[1]*8

    def __init__(self, parent, name, nbytes, description=None,
            strip=None, charset=None, truncate=None):
        GenericString.__init__(self, parent, name, "fixed", description,
            strip=strip, charset=charset, nbytes=nbytes, truncate=truncate)

# Rename the class at runtime — presumably to disambiguate it from other
# "String" types in messages; confirm before relying on the name.
String.__name__ = "FixedString"
+
diff --git a/lib/hachoir_core/field/sub_file.py b/lib/hachoir_core/field/sub_file.py
new file mode 100644
index 0000000000000000000000000000000000000000..0f2912d403aea55f8984786d17f48e1e05ba7f04
--- /dev/null
+++ b/lib/hachoir_core/field/sub_file.py
@@ -0,0 +1,72 @@
+from hachoir_core.field import Bytes
+from hachoir_core.tools import makePrintable, humanFilesize
+from hachoir_core.stream import InputIOStream
+
class SubFile(Bytes):
    """
    File stored in another file.

    The byte range is exposed as a sub-stream carrying optional metadata
    tags (parser class, parser id, MIME type, filename) so it can later be
    parsed as an independent file.
    """
    def __init__(self, parent, name, length, description=None,
    parser=None, filename=None, mime_type=None, parser_class=None):
        if filename:
            if not isinstance(filename, unicode):
                # Byte-string filename: convert to a printable form
                # (assumes ISO-8859-1 source encoding)
                filename = makePrintable(filename, "ISO-8859-1")
            if not description:
                description = 'File "%s" (%s)' % (filename, humanFilesize(length))
        Bytes.__init__(self, parent, name, length, description)
        def createInputStream(cis, **args):
            # Attach metadata tags to the sub-stream built by cis()
            tags = args.setdefault("tags",[])
            if parser_class:
                tags.append(( "class", parser_class ))
            if parser is not None:
                tags.append(( "id", parser.PARSER_TAGS["id"] ))
            if mime_type:
                tags.append(( "mime", mime_type ))
            if filename:
                tags.append(( "filename", filename ))
            return cis(**args)
        self.setSubIStream(createInputStream)
+
class CompressedStream:
    """
    File-like object decompressing data from an input stream on demand.

    `decompressor` is a factory: decompressor(stream) must return a callable
    accepting (size[, data]) and returning up to `size` decompressed bytes.
    """
    # Read position in the underlying stream; advanced by 8 per byte read,
    # so it appears to be a bit address — TODO confirm against stream API
    offset = 0

    def __init__(self, stream, decompressor):
        self.stream = stream
        self.decompressor = decompressor(stream)
        self._buffer = ''  # decompressed bytes not yet consumed by read()

    def read(self, size):
        # Serve from the leftover buffer first
        d = self._buffer
        data = [ d[:size] ]
        size -= len(d)
        if size > 0:
            # Ask the decompressor for bytes it may already have pending
            d = self.decompressor(size)
            data.append(d[:size])
            size -= len(d)
            while size > 0:
                # Still short: feed more compressed input from the stream
                n = 4096
                if self.stream.size:
                    # NOTE(review): stream.size/offset look like bit counts
                    # while n starts as 4096 (bytes?) — verify units.
                    n = min(self.stream.size - self.offset, n)
                    if not n:
                        break
                d = self.stream.read(self.offset, n)[1]
                self.offset += 8 * len(d)
                d = self.decompressor(size, d)
                data.append(d[:size])
                size -= len(d)
        # Keep the surplus of the last chunk (size <= 0 when satisfied)
        self._buffer = d[size+len(d):]
        return ''.join(data)
+
def CompressedField(field, decompressor):
    """
    Wrap `field` so that its sub-stream decompresses data on the fly.

    `decompressor` is the factory handed to CompressedStream (see above).
    Returns `field` itself so the call can be chained.
    """
    def createInputStream(cis, source=None, **args):
        if field._parent:
            # Field lives inside a parent: build its stream and keep its tags
            stream = cis(source=source)
            args.setdefault("tags", []).extend(stream.tags)
        else:
            stream = field.stream
        # Renamed from "input": avoid shadowing the builtin input()
        compressed = CompressedStream(stream, decompressor)
        if source is None:
            source = "Compressed source: '%s' (offset=%s)" % (stream.source, field.absolute_address)
        return InputIOStream(compressed, source=source, **args)
    field.setSubIStream(createInputStream)
    return field
diff --git a/lib/hachoir_core/field/timestamp.py b/lib/hachoir_core/field/timestamp.py
new file mode 100644
index 0000000000000000000000000000000000000000..a533a4b22200c0f8855760dc667408782fc59a36
--- /dev/null
+++ b/lib/hachoir_core/field/timestamp.py
@@ -0,0 +1,86 @@
+from hachoir_core.tools import (humanDatetime, humanDuration,
+    timestampUNIX, timestampMac32, timestampUUID60,
+    timestampWin64, durationWin64)
+from hachoir_core.field import Bits, FieldSet
+from datetime import datetime
+
class GenericTimestamp(Bits):
    """Base class of timestamp fields: an unsigned bit field whose value
    is displayed as a human-readable date."""
    def __init__(self, parent, name, size, description=None):
        Bits.__init__(self, parent, name, size, description)

    def createDisplay(self):
        # Render the converted value as a date/time string
        return humanDatetime(self.value)

    def createRawDisplay(self):
        # Raw (unconverted) integer, as text
        return unicode(Bits.createValue(self))

    def __nonzero__(self):
        # A timestamp is "true" when its raw integer is non-zero
        return Bits.createValue(self) != 0
+
def timestampFactory(cls_name, handler, size):
    """
    Build a timestamp field class named `cls_name`, `size` bits wide,
    whose value is computed by handler(raw_integer).
    """
    class Timestamp(GenericTimestamp):
        def __init__(self, parent, name, description=None):
            GenericTimestamp.__init__(self, parent, name, size, description)

        def createValue(self):
            # Convert the raw integer through the supplied handler
            return handler(Bits.createValue(self))
    Timestamp.__name__ = cls_name
    return Timestamp
+
TimestampUnix32 = timestampFactory("TimestampUnix32", timestampUNIX, 32)
TimestampUnix64 = timestampFactory("TimestampUnix64", timestampUNIX, 64)
# Fix: class name must match the variable (was mistakenly "TimestampUnix32")
TimestampMac32 = timestampFactory("TimestampMac32", timestampMac32, 32)
TimestampUUID60 = timestampFactory("TimestampUUID60", timestampUUID60, 60)
TimestampWin64 = timestampFactory("TimestampWin64", timestampWin64, 64)
+
class TimeDateMSDOS32(FieldSet):
    """
    32-bit MS-DOS timestamp (16-bit time, 16-bit date)
    """
    static_size = 32

    def createFields(self):
        # Time half (16 bits) — field order is the on-disk bit layout
        # TODO: Create type "MSDOS_Second" : value*2
        yield Bits(self, "second", 5, "Second/2")
        yield Bits(self, "minute", 6)
        yield Bits(self, "hour", 5)

        # Date half (16 bits)
        yield Bits(self, "day", 5)
        yield Bits(self, "month", 4)
        # TODO: Create type "MSDOS_Year" : value+1980
        yield Bits(self, "year", 7, "Number of year after 1980")

    def createValue(self):
        # Stored second counts pairs of seconds; year is an offset from 1980
        return datetime(
            1980+self["year"].value, self["month"].value, self["day"].value,
            self["hour"].value, self["minute"].value, 2*self["second"].value)

    def createDisplay(self):
        return humanDatetime(self.value)
+
class DateTimeMSDOS32(TimeDateMSDOS32):
    """
    32-bit MS-DOS timestamp (16-bit date, 16-bit time)

    Same fields as TimeDateMSDOS32 but with the date half stored first.
    """
    def createFields(self):
        yield Bits(self, "day", 5)
        yield Bits(self, "month", 4)
        yield Bits(self, "year", 7, "Number of year after 1980")
        yield Bits(self, "second", 5, "Second/2")
        yield Bits(self, "minute", 6)
        yield Bits(self, "hour", 5)
+
class TimedeltaWin64(GenericTimestamp):
    """64-bit Windows duration field, displayed as a human-readable duration."""
    def __init__(self, parent, name, description=None):
        GenericTimestamp.__init__(self, parent, name, 64, description)

    def createDisplay(self):
        # A duration, not an absolute date
        return humanDuration(self.value)

    def createValue(self):
        return durationWin64(Bits.createValue(self))
+
diff --git a/lib/hachoir_core/field/vector.py b/lib/hachoir_core/field/vector.py
new file mode 100644
index 0000000000000000000000000000000000000000..953fdbc3d27053c9eb200580806bae720fc76588
--- /dev/null
+++ b/lib/hachoir_core/field/vector.py
@@ -0,0 +1,38 @@
+from hachoir_core.field import Field, FieldSet, ParserError
+
class GenericVector(FieldSet):
    """
    Array of `nb_items` fields, each an instance of `item_class`,
    which must have a fixed static size.
    """
    def __init__(self, parent, name, nb_items, item_class, item_name="item", description=None):
        # Sanity checks: items must be fields of known, fixed size
        assert issubclass(item_class, Field)
        assert isinstance(item_class.static_size, (int, long))
        if nb_items <= 0:
            raise ParserError('Unable to create empty vector "%s" in %s'
                % (name, parent.path))
        self.__nb_items = nb_items
        self._item_class = item_class
        self._item_name = item_name
        FieldSet.__init__(self, parent, name, description,
            size=nb_items * item_class.static_size)

    def __len__(self):
        return self.__nb_items

    def createFields(self):
        item_name = self._item_name + "[]"
        item_class = self._item_class
        for _index in xrange(len(self)):
            yield item_class(self, item_name)
+
class UserVector(GenericVector):
    """
    Convenience subclass: set the class attributes below instead of
    passing them to the constructor.
    - item_class: class of each item (must be overridden)
    - item_name: base name of a field without [] (eg. "color" becomes
      "color[0]"), default value is "item"
    """
    item_class = None
    item_name = "item"

    def __init__(self, parent, name, nb_items, description=None):
        GenericVector.__init__(self, parent, name, nb_items,
            self.item_class, self.item_name, description)
+
diff --git a/lib/hachoir_core/i18n.py b/lib/hachoir_core/i18n.py
new file mode 100644
index 0000000000000000000000000000000000000000..b34c748072e90f1f8f3019ea3491c48eb4f5b8a3
--- /dev/null
+++ b/lib/hachoir_core/i18n.py
@@ -0,0 +1,214 @@
+# -*- coding: UTF-8 -*-
+"""
+Functions to manage internationalisation (i18n):
+- initLocale(): setup locales and install Unicode compatible stdout and
+  stderr ;
+- getTerminalCharset(): guess terminal charset ;
+- gettext(text) translate a string to current language. The function always
+  returns Unicode string. You can also use the alias: _() ;
+- ngettext(singular, plural, count): translate a sentence with singular and
+  plural form. The function always returns Unicode string.
+
+WARNING: Loading this module indirectly calls initLocale() which sets
+         locale LC_ALL to ''. This is needed to get user preferred locale
+         settings.
+"""
+
+import hachoir_core.config as config
+import hachoir_core
+import locale
+from os import path
+import sys
+from codecs import BOM_UTF8, BOM_UTF16_LE, BOM_UTF16_BE
+
+def _getTerminalCharset():
+    """
+    Function used by getTerminalCharset() to get terminal charset.
+
+    @see getTerminalCharset()
+    """
+    # (1) Try locale.getpreferredencoding()
+    try:
+        charset = locale.getpreferredencoding()
+        if charset:
+            return charset
+    except (locale.Error, AttributeError):
+        pass
+
+    # (2) Try locale.nl_langinfo(CODESET)
+    try:
+        charset = locale.nl_langinfo(locale.CODESET)
+        if charset:
+            return charset
+    except (locale.Error, AttributeError):
+        pass
+
+    # (3) Try sys.stdout.encoding
+    if hasattr(sys.stdout, "encoding") and sys.stdout.encoding:
+        return sys.stdout.encoding
+
+    # (4) Otherwise, returns "ASCII"
+    return "ASCII"
+
def getTerminalCharset():
    """
    Guess terminal charset using different tests:
    1. Try locale.getpreferredencoding()
    2. Try locale.nl_langinfo(CODESET)
    3. Try sys.stdout.encoding
    4. Otherwise, returns "ASCII"

    The answer is computed once and memoized on the function object.
    WARNING: Call initLocale() before calling this function.
    """
    if not hasattr(getTerminalCharset, "value"):
        getTerminalCharset.value = _getTerminalCharset()
    return getTerminalCharset.value
+
class UnicodeStdout(object):
    """Wrap a byte-oriented output stream so that unicode strings are
    encoded (using 'replace' for unencodable characters) before writing."""
    def __init__(self, old_device, charset):
        self.device = old_device
        self.charset = charset

    def flush(self):
        self.device.flush()

    def write(self, text):
        data = text
        if isinstance(data, unicode):
            # Encode with 'replace' so output never raises UnicodeEncodeError
            data = data.encode(self.charset, 'replace')
        self.device.write(data)

    def writelines(self, lines):
        for line in lines:
            self.write(line)
+
def initLocale():
    """
    Set the process locale from the user environment and, when enabled in
    the configuration, replace sys.stdout/sys.stderr with UnicodeStdout
    wrappers. Returns the terminal charset. Idempotent: the work is done
    only on the first call.
    """
    # Only initialize locale once
    if initLocale.is_done:
        return getTerminalCharset()
    initLocale.is_done = True

    # Setup locales
    try:
        locale.setlocale(locale.LC_ALL, "")
    except (locale.Error, IOError):
        pass

    # Get the terminal charset
    charset = getTerminalCharset()

    # UnicodeStdout conflicts with the readline module
    if config.unicode_stdout and ('readline' not in sys.modules):
        # Replace stdout and stderr by objects accepting unicode strings
        sys.stdout = UnicodeStdout(sys.stdout, charset)
        sys.stderr = UnicodeStdout(sys.stderr, charset)
    return charset
initLocale.is_done = False
+
def _dummy_gettext(text):
    # Fallback used when gettext is disabled or unavailable:
    # no translation, just ensure the result is unicode
    return unicode(text)

def _dummy_ngettext(singular, plural, count):
    # Fallback plural rule: plural form for zero or for |count| > 1
    if 1 < abs(count) or not count:
        return unicode(plural)
    else:
        return unicode(singular)
+
def _initGettext():
    """
    Build the (gettext, ngettext) pair used by this module.

    Returns the dummy (identity) functions when i18n is disabled in the
    configuration or the gettext module is missing; otherwise returns
    wrappers decoding translations to unicode with the terminal charset.
    """
    charset = initLocale()

    # Try to load gettext module
    if config.use_i18n:
        try:
            import gettext
            ok = True
        except ImportError:
            ok = False
    else:
        ok = False

    # gettext is not available or not needed: use dummy gettext functions
    if not ok:
        return (_dummy_gettext, _dummy_ngettext)

    # Gettext variables
    package = hachoir_core.PACKAGE
    # Translations live in the "locale" directory next to the package
    locale_dir = path.join(path.dirname(__file__), "..", "locale")

    # Initialize gettext module
    gettext.bindtextdomain(package, locale_dir)
    gettext.textdomain(package)
    translate = gettext.gettext
    ngettext = gettext.ngettext

    # TODO: translate_unicode lambda function really sucks!
    # => find native function to do that
    unicode_gettext = lambda text: \
        unicode(translate(text), charset)
    unicode_ngettext = lambda singular, plural, count: \
        unicode(ngettext(singular, plural, count), charset)
    return (unicode_gettext, unicode_ngettext)
+
# (BOM byte sequence, charset name) pairs used to detect Unicode text
UTF_BOMS = (
    (BOM_UTF8, "UTF-8"),
    (BOM_UTF16_LE, "UTF-16-LE"),
    (BOM_UTF16_BE, "UTF-16-BE"),
)

# Set of valid characters for specific charset
# Each entry: (set of encoded bytes typical of the charset, charset name).
# Order matters: the first charset whose set covers the input wins.
CHARSET_CHARACTERS = (
    # U+00E0: LATIN SMALL LETTER A WITH GRAVE
    (set(u"©®éêè\xE0ç".encode("ISO-8859-1")), "ISO-8859-1"),
    (set(u"©®éêè\xE0ç€".encode("ISO-8859-15")), "ISO-8859-15"),
    (set(u"©®".encode("MacRoman")), "MacRoman"),
    (set(u"εδηιθκμοΡσςυΈί".encode("ISO-8859-7")), "ISO-8859-7"),
)
+
def guessBytesCharset(bytes, default=None):
    r"""
    Guess the charset of a byte string; return `default` when no known
    charset matches.

    >>> guessBytesCharset("abc")
    'ASCII'
    >>> guessBytesCharset("\xEF\xBB\xBFabc")
    'UTF-8'
    >>> guessBytesCharset("abc\xC3\xA9")
    'UTF-8'
    >>> guessBytesCharset("File written by Adobe Photoshop\xA8 4.0\0")
    'MacRoman'
    >>> guessBytesCharset("\xE9l\xE9phant")
    'ISO-8859-1'
    >>> guessBytesCharset("100 \xA4")
    'ISO-8859-15'
    >>> guessBytesCharset('Word \xb8\xea\xe4\xef\xf3\xe7 - Microsoft Outlook 97 - \xd1\xf5\xe8\xec\xdf\xf3\xe5\xe9\xf2 e-mail')
    'ISO-8859-7'
    """
    # Check for UTF BOM
    for bom_bytes, charset in UTF_BOMS:
        if bytes.startswith(bom_bytes):
            return charset

    # Pure ASCII?
    try:
        text = unicode(bytes, 'ASCII', 'strict')
        return 'ASCII'
    except UnicodeDecodeError:
        pass

    # Valid UTF-8?
    try:
        text = unicode(bytes, 'UTF-8', 'strict')
        return 'UTF-8'
    except UnicodeDecodeError:
        pass

    # Create a set of non-ASCII characters and match it against the
    # known per-charset character sets (first superset wins)
    non_ascii_set = set( byte for byte in bytes if ord(byte) >= 128 )
    for characters, charset in CHARSET_CHARACTERS:
        if characters.issuperset(non_ascii_set):
            return charset
    return default
+
# Initialize _(), gettext() and ngettext() functions
gettext, ngettext = _initGettext()
_ = gettext  # conventional short alias for gettext
+
diff --git a/lib/hachoir_core/iso639.py b/lib/hachoir_core/iso639.py
new file mode 100644
index 0000000000000000000000000000000000000000..61a0ba939bd4f7e3443a5f8008c7644e15f3ab23
--- /dev/null
+++ b/lib/hachoir_core/iso639.py
@@ -0,0 +1,558 @@
+# -*- coding: utf-8 -*-
+"""
+ISO639-2 standard: the module only contains the dictionary ISO639_2
+which maps a language code in three letters (eg. "fre") to a language
+name in english (eg. "French").
+"""
+
+# ISO-639, the list comes from:
+# http://www.loc.gov/standards/iso639-2/php/English_list.php
+_ISO639 = (
+    (u"Abkhazian", "abk", "ab"),
+    (u"Achinese", "ace", None),
+    (u"Acoli", "ach", None),
+    (u"Adangme", "ada", None),
+    (u"Adygei", "ady", None),
+    (u"Adyghe", "ady", None),
+    (u"Afar", "aar", "aa"),
+    (u"Afrihili", "afh", None),
+    (u"Afrikaans", "afr", "af"),
+    (u"Afro-Asiatic (Other)", "afa", None),
+    (u"Ainu", "ain", None),
+    (u"Akan", "aka", "ak"),
+    (u"Akkadian", "akk", None),
+    (u"Albanian", "alb/sqi", "sq"),
+    (u"Alemani", "gsw", None),
+    (u"Aleut", "ale", None),
+    (u"Algonquian languages", "alg", None),
+    (u"Altaic (Other)", "tut", None),
+    (u"Amharic", "amh", "am"),
+    (u"Angika", "anp", None),
+    (u"Apache languages", "apa", None),
+    (u"Arabic", "ara", "ar"),
+    (u"Aragonese", "arg", "an"),
+    (u"Aramaic", "arc", None),
+    (u"Arapaho", "arp", None),
+    (u"Araucanian", "arn", None),
+    (u"Arawak", "arw", None),
+    (u"Armenian", "arm/hye", "hy"),
+    (u"Aromanian", "rup", None),
+    (u"Artificial (Other)", "art", None),
+    (u"Arumanian", "rup", None),
+    (u"Assamese", "asm", "as"),
+    (u"Asturian", "ast", None),
+    (u"Athapascan languages", "ath", None),
+    (u"Australian languages", "aus", None),
+    (u"Austronesian (Other)", "map", None),
+    (u"Avaric", "ava", "av"),
+    (u"Avestan", "ave", "ae"),
+    (u"Awadhi", "awa", None),
+    (u"Aymara", "aym", "ay"),
+    (u"Azerbaijani", "aze", "az"),
+    (u"Bable", "ast", None),
+    (u"Balinese", "ban", None),
+    (u"Baltic (Other)", "bat", None),
+    (u"Baluchi", "bal", None),
+    (u"Bambara", "bam", "bm"),
+    (u"Bamileke languages", "bai", None),
+    (u"Banda", "bad", None),
+    (u"Bantu (Other)", "bnt", None),
+    (u"Basa", "bas", None),
+    (u"Bashkir", "bak", "ba"),
+    (u"Basque", "baq/eus", "eu"),
+    (u"Batak (Indonesia)", "btk", None),
+    (u"Beja", "bej", None),
+    (u"Belarusian", "bel", "be"),
+    (u"Bemba", "bem", None),
+    (u"Bengali", "ben", "bn"),
+    (u"Berber (Other)", "ber", None),
+    (u"Bhojpuri", "bho", None),
+    (u"Bihari", "bih", "bh"),
+    (u"Bikol", "bik", None),
+    (u"Bilin", "byn", None),
+    (u"Bini", "bin", None),
+    (u"Bislama", "bis", "bi"),
+    (u"Blin", "byn", None),
+    (u"Bokmål, Norwegian", "nob", "nb"),
+    (u"Bosnian", "bos", "bs"),
+    (u"Braj", "bra", None),
+    (u"Breton", "bre", "br"),
+    (u"Buginese", "bug", None),
+    (u"Bulgarian", "bul", "bg"),
+    (u"Buriat", "bua", None),
+    (u"Burmese", "bur/mya", "my"),
+    (u"Caddo", "cad", None),
+    (u"Carib", "car", None),
+    (u"Castilian", "spa", "es"),
+    (u"Catalan", "cat", "ca"),
+    (u"Caucasian (Other)", "cau", None),
+    (u"Cebuano", "ceb", None),
+    (u"Celtic (Other)", "cel", None),
+    (u"Central American Indian (Other)", "cai", None),
+    (u"Chagatai", "chg", None),
+    (u"Chamic languages", "cmc", None),
+    (u"Chamorro", "cha", "ch"),
+    (u"Chechen", "che", "ce"),
+    (u"Cherokee", "chr", None),
+    (u"Chewa", "nya", "ny"),
+    (u"Cheyenne", "chy", None),
+    (u"Chibcha", "chb", None),
+    (u"Chichewa", "nya", "ny"),
+    (u"Chinese", "chi/zho", "zh"),
+    (u"Chinook jargon", "chn", None),
+    (u"Chipewyan", "chp", None),
+    (u"Choctaw", "cho", None),
+    (u"Chuang", "zha", "za"),
+    (u"Church Slavic", "chu", "cu"),
+    (u"Church Slavonic", "chu", "cu"),
+    (u"Chuukese", "chk", None),
+    (u"Chuvash", "chv", "cv"),
+    (u"Classical Nepal Bhasa", "nwc", None),
+    (u"Classical Newari", "nwc", None),
+    (u"Coptic", "cop", None),
+    (u"Cornish", "cor", "kw"),
+    (u"Corsican", "cos", "co"),
+    (u"Cree", "cre", "cr"),
+    (u"Creek", "mus", None),
+    (u"Creoles and pidgins (Other)", "crp", None),
+    (u"Creoles and pidgins, English based (Other)", "cpe", None),
+    (u"Creoles and pidgins, French-based (Other)", "cpf", None),
+    (u"Creoles and pidgins, Portuguese-based (Other)", "cpp", None),
+    (u"Crimean Tatar", "crh", None),
+    (u"Crimean Turkish", "crh", None),
+    (u"Croatian", "scr/hrv", "hr"),
+    (u"Cushitic (Other)", "cus", None),
+    (u"Czech", "cze/ces", "cs"),
+    (u"Dakota", "dak", None),
+    (u"Danish", "dan", "da"),
+    (u"Dargwa", "dar", None),
+    (u"Dayak", "day", None),
+    (u"Delaware", "del", None),
+    (u"Dhivehi", "div", "dv"),
+    (u"Dimili", "zza", None),
+    (u"Dimli", "zza", None),
+    (u"Dinka", "din", None),
+    (u"Divehi", "div", "dv"),
+    (u"Dogri", "doi", None),
+    (u"Dogrib", "dgr", None),
+    (u"Dravidian (Other)", "dra", None),
+    (u"Duala", "dua", None),
+    (u"Dutch", "dut/nld", "nl"),
+    (u"Dutch, Middle (ca.1050-1350)", "dum", None),
+    (u"Dyula", "dyu", None),
+    (u"Dzongkha", "dzo", "dz"),
+    (u"Eastern Frisian", "frs", None),
+    (u"Efik", "efi", None),
+    (u"Egyptian (Ancient)", "egy", None),
+    (u"Ekajuk", "eka", None),
+    (u"Elamite", "elx", None),
+    (u"English", "eng", "en"),
+    (u"English, Middle (1100-1500)", "enm", None),
+    (u"English, Old (ca.450-1100)", "ang", None),
+    (u"Erzya", "myv", None),
+    (u"Esperanto", "epo", "eo"),
+    (u"Estonian", "est", "et"),
+    (u"Ewe", "ewe", "ee"),
+    (u"Ewondo", "ewo", None),
+    (u"Fang", "fan", None),
+    (u"Fanti", "fat", None),
+    (u"Faroese", "fao", "fo"),
+    (u"Fijian", "fij", "fj"),
+    (u"Filipino", "fil", None),
+    (u"Finnish", "fin", "fi"),
+    (u"Finno-Ugrian (Other)", "fiu", None),
+    (u"Flemish", "dut/nld", "nl"),
+    (u"Fon", "fon", None),
+    (u"French", "fre/fra", "fr"),
+    (u"French, Middle (ca.1400-1600)", "frm", None),
+    (u"French, Old (842-ca.1400)", "fro", None),
+    (u"Friulian", "fur", None),
+    (u"Fulah", "ful", "ff"),
+    (u"Ga", "gaa", None),
+    (u"Gaelic", "gla", "gd"),
+    (u"Galician", "glg", "gl"),
+    (u"Ganda", "lug", "lg"),
+    (u"Gayo", "gay", None),
+    (u"Gbaya", "gba", None),
+    (u"Geez", "gez", None),
+    (u"Georgian", "geo/kat", "ka"),
+    (u"German", "ger/deu", "de"),
+    (u"German, Low", "nds", None),
+    (u"German, Middle High (ca.1050-1500)", "gmh", None),
+    (u"German, Old High (ca.750-1050)", "goh", None),
+    (u"Germanic (Other)", "gem", None),
+    (u"Gikuyu", "kik", "ki"),
+    (u"Gilbertese", "gil", None),
+    (u"Gondi", "gon", None),
+    (u"Gorontalo", "gor", None),
+    (u"Gothic", "got", None),
+    (u"Grebo", "grb", None),
+    (u"Greek, Ancient (to 1453)", "grc", None),
+    (u"Greek, Modern (1453-)", "gre/ell", "el"),
+    (u"Greenlandic", "kal", "kl"),
+    (u"Guarani", "grn", "gn"),
+    (u"Gujarati", "guj", "gu"),
+    (u"Gwich´in", "gwi", None),
+    (u"Haida", "hai", None),
+    (u"Haitian", "hat", "ht"),
+    (u"Haitian Creole", "hat", "ht"),
+    (u"Hausa", "hau", "ha"),
+    (u"Hawaiian", "haw", None),
+    (u"Hebrew", "heb", "he"),
+    (u"Herero", "her", "hz"),
+    (u"Hiligaynon", "hil", None),
+    (u"Himachali", "him", None),
+    (u"Hindi", "hin", "hi"),
+    (u"Hiri Motu", "hmo", "ho"),
+    (u"Hittite", "hit", None),
+    (u"Hmong", "hmn", None),
+    (u"Hungarian", "hun", "hu"),
+    (u"Hupa", "hup", None),
+    (u"Iban", "iba", None),
+    (u"Icelandic", "ice/isl", "is"),
+    (u"Ido", "ido", "io"),
+    (u"Igbo", "ibo", "ig"),
+    (u"Ijo", "ijo", None),
+    (u"Iloko", "ilo", None),
+    (u"Inari Sami", "smn", None),
+    (u"Indic (Other)", "inc", None),
+    (u"Indo-European (Other)", "ine", None),
+    (u"Indonesian", "ind", "id"),
+    (u"Ingush", "inh", None),
+    (u"Interlingua", "ina", "ia"),
+    (u"Interlingue", "ile", "ie"),
+    (u"Inuktitut", "iku", "iu"),
+    (u"Inupiaq", "ipk", "ik"),
+    (u"Iranian (Other)", "ira", None),
+    (u"Irish", "gle", "ga"),
+    (u"Irish, Middle (900-1200)", "mga", None),
+    (u"Irish, Old (to 900)", "sga", None),
+    (u"Iroquoian languages", "iro", None),
+    (u"Italian", "ita", "it"),
+    (u"Japanese", "jpn", "ja"),
+    (u"Javanese", "jav", "jv"),
+    (u"Judeo-Arabic", "jrb", None),
+    (u"Judeo-Persian", "jpr", None),
+    (u"Kabardian", "kbd", None),
+    (u"Kabyle", "kab", None),
+    (u"Kachin", "kac", None),
+    (u"Kalaallisut", "kal", "kl"),
+    (u"Kalmyk", "xal", None),
+    (u"Kamba", "kam", None),
+    (u"Kannada", "kan", "kn"),
+    (u"Kanuri", "kau", "kr"),
+    (u"Kara-Kalpak", "kaa", None),
+    (u"Karachay-Balkar", "krc", None),
+    (u"Karelian", "krl", None),
+    (u"Karen", "kar", None),
+    (u"Kashmiri", "kas", "ks"),
+    (u"Kashubian", "csb", None),
+    (u"Kawi", "kaw", None),
+    (u"Kazakh", "kaz", "kk"),
+    (u"Khasi", "kha", None),
+    (u"Khmer", "khm", "km"),
+    (u"Khoisan (Other)", "khi", None),
+    (u"Khotanese", "kho", None),
+    (u"Kikuyu", "kik", "ki"),
+    (u"Kimbundu", "kmb", None),
+    (u"Kinyarwanda", "kin", "rw"),
+    (u"Kirdki", "zza", None),
+    (u"Kirghiz", "kir", "ky"),
+    (u"Kirmanjki", "zza", None),
+    (u"Klingon", "tlh", None),
+    (u"Komi", "kom", "kv"),
+    (u"Kongo", "kon", "kg"),
+    (u"Konkani", "kok", None),
+    (u"Korean", "kor", "ko"),
+    (u"Kosraean", "kos", None),
+    (u"Kpelle", "kpe", None),
+    (u"Kru", "kro", None),
+    (u"Kuanyama", "kua", "kj"),
+    (u"Kumyk", "kum", None),
+    (u"Kurdish", "kur", "ku"),
+    (u"Kurukh", "kru", None),
+    (u"Kutenai", "kut", None),
+    (u"Kwanyama", "kua", "kj"),
+    (u"Ladino", "lad", None),
+    (u"Lahnda", "lah", None),
+    (u"Lamba", "lam", None),
+    (u"Lao", "lao", "lo"),
+    (u"Latin", "lat", "la"),
+    (u"Latvian", "lav", "lv"),
+    (u"Letzeburgesch", "ltz", "lb"),
+    (u"Lezghian", "lez", None),
+    (u"Limburgan", "lim", "li"),
+    (u"Limburger", "lim", "li"),
+    (u"Limburgish", "lim", "li"),
+    (u"Lingala", "lin", "ln"),
+    (u"Lithuanian", "lit", "lt"),
+    (u"Lojban", "jbo", None),
+    (u"Low German", "nds", None),
+    (u"Low Saxon", "nds", None),
+    (u"Lower Sorbian", "dsb", None),
+    (u"Lozi", "loz", None),
+    (u"Luba-Katanga", "lub", "lu"),
+    (u"Luba-Lulua", "lua", None),
+    (u"Luiseno", "lui", None),
+    (u"Lule Sami", "smj", None),
+    (u"Lunda", "lun", None),
+    (u"Luo (Kenya and Tanzania)", "luo", None),
+    (u"Lushai", "lus", None),
+    (u"Luxembourgish", "ltz", "lb"),
+    (u"Macedo-Romanian", "rup", None),
+    (u"Macedonian", "mac/mkd", "mk"),
+    (u"Madurese", "mad", None),
+    (u"Magahi", "mag", None),
+    (u"Maithili", "mai", None),
+    (u"Makasar", "mak", None),
+    (u"Malagasy", "mlg", "mg"),
+    (u"Malay", "may/msa", "ms"),
+    (u"Malayalam", "mal", "ml"),
+    (u"Maldivian", "div", "dv"),
+    (u"Maltese", "mlt", "mt"),
+    (u"Manchu", "mnc", None),
+    (u"Mandar", "mdr", None),
+    (u"Mandingo", "man", None),
+    (u"Manipuri", "mni", None),
+    (u"Manobo languages", "mno", None),
+    (u"Manx", "glv", "gv"),
+    (u"Maori", "mao/mri", "mi"),
+    (u"Marathi", "mar", "mr"),
+    (u"Mari", "chm", None),
+    (u"Marshallese", "mah", "mh"),
+    (u"Marwari", "mwr", None),
+    (u"Masai", "mas", None),
+    (u"Mayan languages", "myn", None),
+    (u"Mende", "men", None),
+    (u"Mi'kmaq", "mic", None),
+    (u"Micmac", "mic", None),
+    (u"Minangkabau", "min", None),
+    (u"Mirandese", "mwl", None),
+    (u"Miscellaneous languages", "mis", None),
+    (u"Mohawk", "moh", None),
+    (u"Moksha", "mdf", None),
+    (u"Moldavian", "mol", "mo"),
+    (u"Mon-Khmer (Other)", "mkh", None),
+    (u"Mongo", "lol", None),
+    (u"Mongolian", "mon", "mn"),
+    (u"Mossi", "mos", None),
+    (u"Multiple languages", "mul", None),
+    (u"Munda languages", "mun", None),
+    (u"N'Ko", "nqo", None),
+    (u"Nahuatl", "nah", None),
+    (u"Nauru", "nau", "na"),
+    (u"Navaho", "nav", "nv"),
+    (u"Navajo", "nav", "nv"),
+    (u"Ndebele, North", "nde", "nd"),
+    (u"Ndebele, South", "nbl", "nr"),
+    (u"Ndonga", "ndo", "ng"),
+    (u"Neapolitan", "nap", None),
+    (u"Nepal Bhasa", "new", None),
+    (u"Nepali", "nep", "ne"),
+    (u"Newari", "new", None),
+    (u"Nias", "nia", None),
+    (u"Niger-Kordofanian (Other)", "nic", None),
+    (u"Nilo-Saharan (Other)", "ssa", None),
+    (u"Niuean", "niu", None),
+    (u"No linguistic content", "zxx", None),
+    (u"Nogai", "nog", None),
+    (u"Norse, Old", "non", None),
+    (u"North American Indian", "nai", None),
+    (u"North Ndebele", "nde", "nd"),
+    (u"Northern Frisian", "frr", None),
+    (u"Northern Sami", "sme", "se"),
+    (u"Northern Sotho", "nso", None),
+    (u"Norwegian", "nor", "no"),
+    (u"Norwegian Bokmål", "nob", "nb"),
+    (u"Norwegian Nynorsk", "nno", "nn"),
+    (u"Nubian languages", "nub", None),
+    (u"Nyamwezi", "nym", None),
+    (u"Nyanja", "nya", "ny"),
+    (u"Nyankole", "nyn", None),
+    (u"Nynorsk, Norwegian", "nno", "nn"),
+    (u"Nyoro", "nyo", None),
+    (u"Nzima", "nzi", None),
+    (u"Occitan (post 1500)", "oci", "oc"),
+    (u"Oirat", "xal", None),
+    (u"Ojibwa", "oji", "oj"),
+    (u"Old Bulgarian", "chu", "cu"),
+    (u"Old Church Slavonic", "chu", "cu"),
+    (u"Old Newari", "nwc", None),
+    (u"Old Slavonic", "chu", "cu"),
+    (u"Oriya", "ori", "or"),
+    (u"Oromo", "orm", "om"),
+    (u"Osage", "osa", None),
+    (u"Ossetian", "oss", "os"),
+    (u"Ossetic", "oss", "os"),
+    (u"Otomian languages", "oto", None),
+    (u"Pahlavi", "pal", None),
+    (u"Palauan", "pau", None),
+    (u"Pali", "pli", "pi"),
+    (u"Pampanga", "pam", None),
+    (u"Pangasinan", "pag", None),
+    (u"Panjabi", "pan", "pa"),
+    (u"Papiamento", "pap", None),
+    (u"Papuan (Other)", "paa", None),
+    (u"Pedi", "nso", None),
+    (u"Persian", "per/fas", "fa"),
+    (u"Persian, Old (ca.600-400 B.C.)", "peo", None),
+    (u"Philippine (Other)", "phi", None),
+    (u"Phoenician", "phn", None),
+    (u"Pilipino", "fil", None),
+    (u"Pohnpeian", "pon", None),
+    (u"Polish", "pol", "pl"),
+    (u"Portuguese", "por", "pt"),
+    (u"Prakrit languages", "pra", None),
+    (u"Provençal", "oci", "oc"),
+    (u"Provençal, Old (to 1500)", "pro", None),
+    (u"Punjabi", "pan", "pa"),
+    (u"Pushto", "pus", "ps"),
+    (u"Quechua", "que", "qu"),
+    (u"Raeto-Romance", "roh", "rm"),
+    (u"Rajasthani", "raj", None),
+    (u"Rapanui", "rap", None),
+    (u"Rarotongan", "rar", None),
+    (u"Reserved for local use", "qaa/qtz", None),
+    (u"Romance (Other)", "roa", None),
+    (u"Romanian", "rum/ron", "ro"),
+    (u"Romany", "rom", None),
+    (u"Rundi", "run", "rn"),
+    (u"Russian", "rus", "ru"),
+    (u"Salishan languages", "sal", None),
+    (u"Samaritan Aramaic", "sam", None),
+    (u"Sami languages (Other)", "smi", None),
+    (u"Samoan", "smo", "sm"),
+    (u"Sandawe", "sad", None),
+    (u"Sango", "sag", "sg"),
+    (u"Sanskrit", "san", "sa"),
+    (u"Santali", "sat", None),
+    (u"Sardinian", "srd", "sc"),
+    (u"Sasak", "sas", None),
+    (u"Saxon, Low", "nds", None),
+    (u"Scots", "sco", None),
+    (u"Scottish Gaelic", "gla", "gd"),
+    (u"Selkup", "sel", None),
+    (u"Semitic (Other)", "sem", None),
+    (u"Sepedi", "nso", None),
+    (u"Serbian", "scc/srp", "sr"),
+    (u"Serer", "srr", None),
+    (u"Shan", "shn", None),
+    (u"Shona", "sna", "sn"),
+    (u"Sichuan Yi", "iii", "ii"),
+    (u"Sicilian", "scn", None),
+    (u"Sidamo", "sid", None),
+    (u"Sign Languages", "sgn", None),
+    (u"Siksika", "bla", None),
+    (u"Sindhi", "snd", "sd"),
+    (u"Sinhala", "sin", "si"),
+    (u"Sinhalese", "sin", "si"),
+    (u"Sino-Tibetan (Other)", "sit", None),
+    (u"Siouan languages", "sio", None),
+    (u"Skolt Sami", "sms", None),
+    (u"Slave (Athapascan)", "den", None),
+    (u"Slavic (Other)", "sla", None),
+    (u"Slovak", "slo/slk", "sk"),
+    (u"Slovenian", "slv", "sl"),
+    (u"Sogdian", "sog", None),
+    (u"Somali", "som", "so"),
+    (u"Songhai", "son", None),
+    (u"Soninke", "snk", None),
+    (u"Sorbian languages", "wen", None),
+    (u"Sotho, Northern", "nso", None),
+    (u"Sotho, Southern", "sot", "st"),
+    (u"South American Indian (Other)", "sai", None),
+    (u"South Ndebele", "nbl", "nr"),
+    (u"Southern Altai", "alt", None),
+    (u"Southern Sami", "sma", None),
+    (u"Spanish", "spa", "es"),
+    (u"Sranan Togo", "srn", None),
+    (u"Sukuma", "suk", None),
+    (u"Sumerian", "sux", None),
+    (u"Sundanese", "sun", "su"),
+    (u"Susu", "sus", None),
+    (u"Swahili", "swa", "sw"),
+    (u"Swati", "ssw", "ss"),
+    (u"Swedish", "swe", "sv"),
+    (u"Swiss German", "gsw", None),
+    (u"Syriac", "syr", None),
+    (u"Tagalog", "tgl", "tl"),
+    (u"Tahitian", "tah", "ty"),
+    (u"Tai (Other)", "tai", None),
+    (u"Tajik", "tgk", "tg"),
+    (u"Tamashek", "tmh", None),
+    (u"Tamil", "tam", "ta"),
+    (u"Tatar", "tat", "tt"),
+    (u"Telugu", "tel", "te"),
+    (u"Tereno", "ter", None),
+    (u"Tetum", "tet", None),
+    (u"Thai", "tha", "th"),
+    (u"Tibetan", "tib/bod", "bo"),
+    (u"Tigre", "tig", None),
+    (u"Tigrinya", "tir", "ti"),
+    (u"Timne", "tem", None),
+    (u"Tiv", "tiv", None),
+    (u"tlhIngan-Hol", "tlh", None),
+    (u"Tlingit", "tli", None),
+    (u"Tok Pisin", "tpi", None),
+    (u"Tokelau", "tkl", None),
+    (u"Tonga (Nyasa)", "tog", None),
+    (u"Tonga (Tonga Islands)", "ton", "to"),
+    (u"Tsimshian", "tsi", None),
+    (u"Tsonga", "tso", "ts"),
+    (u"Tswana", "tsn", "tn"),
+    (u"Tumbuka", "tum", None),
+    (u"Tupi languages", "tup", None),
+    (u"Turkish", "tur", "tr"),
+    (u"Turkish, Ottoman (1500-1928)", "ota", None),
+    (u"Turkmen", "tuk", "tk"),
+    (u"Tuvalu", "tvl", None),
+    (u"Tuvinian", "tyv", None),
+    (u"Twi", "twi", "tw"),
+    (u"Udmurt", "udm", None),
+    (u"Ugaritic", "uga", None),
+    (u"Uighur", "uig", "ug"),
+    (u"Ukrainian", "ukr", "uk"),
+    (u"Umbundu", "umb", None),
+    (u"Undetermined", "und", None),
+    (u"Upper Sorbian", "hsb", None),
+    (u"Urdu", "urd", "ur"),
+    (u"Uyghur", "uig", "ug"),
+    (u"Uzbek", "uzb", "uz"),
+    (u"Vai", "vai", None),
+    (u"Valencian", "cat", "ca"),
+    (u"Venda", "ven", "ve"),
+    (u"Vietnamese", "vie", "vi"),
+    (u"Volapük", "vol", "vo"),
+    (u"Votic", "vot", None),
+    (u"Wakashan languages", "wak", None),
+    (u"Walamo", "wal", None),
+    (u"Walloon", "wln", "wa"),
+    (u"Waray", "war", None),
+    (u"Washo", "was", None),
+    (u"Welsh", "wel/cym", "cy"),
+    (u"Western Frisian", "fry", "fy"),
+    (u"Wolof", "wol", "wo"),
+    (u"Xhosa", "xho", "xh"),
+    (u"Yakut", "sah", None),
+    (u"Yao", "yao", None),
+    (u"Yapese", "yap", None),
+    (u"Yiddish", "yid", "yi"),
+    (u"Yoruba", "yor", "yo"),
+    (u"Yupik languages", "ypk", None),
+    (u"Zande", "znd", None),
+    (u"Zapotec", "zap", None),
+    (u"Zaza", "zza", None),
+    (u"Zazaki", "zza", None),
+    (u"Zenaga", "zen", None),
+    (u"Zhuang", "zha", "za"),
+    (u"Zulu", "zul", "zu"),
+    (u"Zuni", "zun", None),
+)
+
# Bibliographic ISO-639-2 form (eg. "fre" => "French"): one entry per
# code, expanding slash-separated codes such as "fre/fra"
ISO639_2 = dict(
    (code, name)
    for name, codes, _iso1 in _ISO639
    for code in codes.split("/")
)
del _ISO639
+
diff --git a/lib/hachoir_core/language.py b/lib/hachoir_core/language.py
new file mode 100644
index 0000000000000000000000000000000000000000..997f7a61d981cbab80150404e2ccbcbc2ff6e201
--- /dev/null
+++ b/lib/hachoir_core/language.py
@@ -0,0 +1,23 @@
+from hachoir_core.iso639 import ISO639_2
+
+class Language:
+    """
+    Wrap a validated ISO-639-2 language code (eg. "fre").
+
+    Instances compare by code string and render as the English
+    language name taken from ISO639_2.
+    """
+    def __init__(self, code):
+        # Accept any object convertible to str (eg. a parsed field value).
+        code = str(code)
+        if code not in ISO639_2:
+            raise ValueError("Invalid language code: %r" % code)
+        self.code = code
+
+    def __cmp__(self, other):
+        # Python 2 comparison hook: any non-Language object compares
+        # greater-than; Language instances are ordered by code string.
+        if other.__class__ != Language:
+            return 1
+        return cmp(self.code, other.code)
+
+    def __unicode__(self):
+       # NOTE(review): body indented 7 spaces (file uses 8 elsewhere);
+       # legal Python but inconsistent — kept byte-identical here.
+       return ISO639_2[self.code]
+
+    def __str__(self):
+       return self.__unicode__()
+
+    def __repr__(self):
+        return "<Language '%s', code=%r>" % (unicode(self), self.code)
+
diff --git a/lib/hachoir_core/log.py b/lib/hachoir_core/log.py
new file mode 100644
index 0000000000000000000000000000000000000000..32fca06d03b3722251a71c79ea67aeecd4731d6a
--- /dev/null
+++ b/lib/hachoir_core/log.py
@@ -0,0 +1,144 @@
+import os, sys, time
+import hachoir_core.config as config
+from hachoir_core.i18n import _
+
+class Log:
+    """
+    Logger with three severity levels. Messages can be printed to the
+    terminal, kept in an in-memory buffer, written to a UTF-8 log file,
+    and/or forwarded to a user callback (on_new_message).
+    """
+    LOG_INFO   = 0
+    LOG_WARN   = 1
+    LOG_ERROR  = 2
+
+    # Text prefix displayed/written for each level.
+    level_name = {
+        LOG_WARN: "[warn]",
+        LOG_ERROR: "[err!]",
+        LOG_INFO: "[info]"
+    }
+
+    def __init__(self):
+        # level -> list of messages, filled only when use_buffer is True
+        self.__buffer = {}
+        # codecs file object set by setFilename(), None when unused
+        self.__file = None
+        # NOTE(review): despite the name, newMessage() writes to stderr;
+        # stdout is only flushed first to keep interleaved output ordered.
+        self.use_print = True
+        self.use_buffer = False
+        self.on_new_message = None # Prototype: def func(level, prefix, text, context)
+
+    def shutdown(self):
+        """Write a final message to the log file (if one is open)."""
+        if self.__file:
+            self._writeIntoFile(_("Stop Hachoir"))
+
+    def setFilename(self, filename, append=True):
+        """
+        Use a file to store all messages. The
+        UTF-8 encoding will be used. Write an informative
+        message if the file can't be created.
+
+        @param filename: C{L{string}}
+        """
+
+        # Look if file already exists or not
+        filename = os.path.expanduser(filename)
+        filename = os.path.realpath(filename)
+        # NOTE(review): this overwrites the 'append' argument — the
+        # caller's choice is ignored; existing files are always appended.
+        append = os.access(filename, os.F_OK)
+
+        # Create log file (or open it in append mode, if it already exists)
+        try:
+            import codecs
+            if append:
+                self.__file = codecs.open(filename, "a", "utf-8")
+            else:
+                self.__file = codecs.open(filename, "w", "utf-8")
+            self._writeIntoFile(_("Starting Hachoir"))
+        except IOError, err:
+            # errno 2 == ENOENT (eg. parent directory doesn't exist)
+            if err.errno == 2:
+                self.__file = None
+                self.info(_("[Log] setFilename(%s) fails: no such file") % filename)
+            else:
+                raise
+
+    def _writeIntoFile(self, message):
+        # Prefix each line with a local timestamp; flush immediately so
+        # the log stays usable even if the process dies.
+        timestamp = time.strftime("%Y-%m-%d %H:%M:%S")
+        self.__file.write(u"%s - %s\n" % (timestamp, message))
+        self.__file.flush()
+
+    def newMessage(self, level, text, ctxt=None):
+        """
+        Write a new message : append it in the buffer,
+        display it to the screen (if needed), and write
+        it in the log file (if needed).
+
+        @param level: Message level.
+        @type level: C{int}
+        @param text: Message content.
+        @type text: C{str}
+        @param ctxt: The caller instance.
+        """
+
+        # Filter: drop warnings/info in quiet mode, info when not verbose.
+        if level < self.LOG_ERROR and config.quiet or \
+           level <= self.LOG_INFO and not config.verbose:
+            return
+        if config.debug:
+            from hachoir_core.error import getBacktrace
+            backtrace = getBacktrace(None)
+            if backtrace:
+                text += "\n\n" + backtrace
+
+        # Keep the unprefixed text for the callback; prepend the caller
+        # context (Logger._logger()) for display/storage.
+        _text = text
+        if hasattr(ctxt, "_logger"):
+            _ctxt = ctxt._logger()
+            if _ctxt is not None:
+                text = "[%s] %s" % (_ctxt, text)
+
+        # Add message to log buffer
+        if self.use_buffer:
+            if not self.__buffer.has_key(level):
+                self.__buffer[level] = [text]
+            else:
+                self.__buffer[level].append(text)
+
+        # Add prefix
+        prefix = self.level_name.get(level, "[info]")
+
+        # Display on stdout (if used)
+        if self.use_print:
+            # Flush stdout first so mixed stdout/stderr output keeps order.
+            sys.stdout.flush()
+            sys.stderr.write("%s %s\n" % (prefix, text))
+            sys.stderr.flush()
+
+        # Write into outfile (if used)
+        if self.__file:
+            self._writeIntoFile("%s %s" % (prefix, text))
+
+        # Use callback (if used)
+        if self.on_new_message:
+            self.on_new_message (level, prefix, _text, ctxt)
+
+    def info(self, text):
+        """
+        New informative message.
+        @type text: C{str}
+        """
+        self.newMessage(Log.LOG_INFO, text)
+
+    def warning(self, text):
+        """
+        New warning message.
+        @type text: C{str}
+        """
+        self.newMessage(Log.LOG_WARN, text)
+
+    def error(self, text):
+        """
+        New error message.
+        @type text: C{str}
+        """
+        self.newMessage(Log.LOG_ERROR, text)
+
+log = Log()
+
+class Logger(object):
+    """
+    Mixin adding info/warning/error methods that forward to the global
+    'log' with the instance as context; the class name becomes the
+    bracketed prefix in log output (via _logger()).
+    """
+    def _logger(self):
+        # Context string shown between brackets by Log.newMessage().
+        return "<%s>" % self.__class__.__name__
+    def info(self, text):
+        log.newMessage(Log.LOG_INFO, text, self)
+    def warning(self, text):
+        log.newMessage(Log.LOG_WARN, text, self)
+    def error(self, text):
+        log.newMessage(Log.LOG_ERROR, text, self)
diff --git a/lib/hachoir_core/memory.py b/lib/hachoir_core/memory.py
new file mode 100644
index 0000000000000000000000000000000000000000..54425f83b36186a0a22cac6aae4d756e7c48ed25
--- /dev/null
+++ b/lib/hachoir_core/memory.py
@@ -0,0 +1,99 @@
+import gc
+
+#---- Default implementation when resource is missing ----------------------
+PAGE_SIZE = 4096
+
+def getMemoryLimit():
+    """
+    Get current memory limit in bytes.
+
+    Return None on error.
+    """
+    # Fallback stub used when the 'resource' module is unavailable;
+    # replaced by the real implementation further below when it imports.
+    return None
+
+def setMemoryLimit(max_mem):
+    """
+    Set memory limit in bytes.
+    Use value 'None' to disable memory limit.
+
+    Return True if limit is set, False on error.
+    """
+    # Fallback stub: always fails without the 'resource' module.
+    return False
+
+def getMemorySize():
+    """
+    Read current process memory size: size of available virtual memory.
+    This value is NOT the real memory usage.
+
+    This function only works on Linux (use /proc/self/statm file).
+    """
+    try:
+        statm = open('/proc/self/statm').readline().split()
+    except IOError:
+        return None
+    # statm[0] is the total program size, expressed in pages.
+    return int(statm[0]) * PAGE_SIZE
+
+def clearCaches():
+    """
+    Try to clear all caches: call gc.collect() (Python garbage collector).
+    """
+    gc.collect()
+    # Purging the regex cache is possible too, but kept disabled:
+    #import re; re.purge()
+
+try:
+#---- 'resource' implementation ---------------------------------------------
+    from resource import getpagesize, getrlimit, setrlimit, RLIMIT_AS
+
+    PAGE_SIZE = getpagesize()
+
+    def getMemoryLimit():
+        # Soft limit on the address space; a negative value
+        # (RLIM_INFINITY) means no limit is set.
+        try:
+            limit = getrlimit(RLIMIT_AS)[0]
+            if 0 < limit:
+                # NOTE(review): POSIX getrlimit() reports RLIMIT_AS in
+                # bytes, so scaling by the page size looks wrong — confirm
+                # before relying on this value.
+                limit *= PAGE_SIZE
+            return limit
+        except ValueError:
+            return None
+
+    def setMemoryLimit(max_mem):
+        # Only the soft limit is set; the hard limit stays unlimited (-1).
+        if max_mem is None:
+            max_mem = -1
+        try:
+            setrlimit(RLIMIT_AS, (max_mem, -1))
+            return True
+        except ValueError:
+            return False
+except ImportError:
+    # 'resource' unavailable (eg. Windows): keep the stub versions above.
+    pass
+
+def limitedMemory(limit, func, *args, **kw):
+    """
+    Limit memory grow when calling func(*args, **kw):
+    restrict memory grow to 'limit' bytes.
+
+    Use try/except MemoryError to catch the error.
+    """
+    # First step: clear cache to gain memory
+    clearCaches()
+
+    # Get total program size
+    max_rss = getMemorySize()
+    if max_rss is not None:
+        # Get old limit and then set our new memory limit
+        old_limit = getMemoryLimit()
+        # The new cap is the current usage plus the allowed growth.
+        limit = max_rss + limit
+        limited = setMemoryLimit(limit)
+    else:
+        # Current size unknown (non-Linux): no meaningful cap can be set.
+        limited = False
+
+    try:
+        # Call function
+        return func(*args, **kw)
+    finally:
+        # and unset our memory limit
+        if limited:
+            setMemoryLimit(old_limit)
+
+        # After calling the function: clear all caches
+        clearCaches()
+
diff --git a/lib/hachoir_core/profiler.py b/lib/hachoir_core/profiler.py
new file mode 100644
index 0000000000000000000000000000000000000000..eabc575c47c4ee9ba852230940ad7738341a20f9
--- /dev/null
+++ b/lib/hachoir_core/profiler.py
@@ -0,0 +1,31 @@
+from hotshot import Profile
+from hotshot.stats import load as loadStats
+from os import unlink
+
+def runProfiler(func, args=tuple(), kw={}, verbose=True, nb_func=25, sort_by=('cumulative', 'calls')):
+    """
+    Profile func(*args, **kw) with the hotshot profiler, print the
+    nb_func most expensive entries sorted by sort_by, and return the
+    function's result.
+
+    NOTE(review): hotshot only exists in Python 2 (removed in Python 3),
+    and the hard-coded "/tmp/profiler" path is unsafe on multi-user
+    systems — tempfile.mkstemp() would be preferable.
+    """
+    profile_filename = "/tmp/profiler"
+    prof = Profile(profile_filename)
+    try:
+        if verbose:
+            print "[+] Run profiler"
+        result = prof.runcall(func, *args, **kw)
+        prof.close()
+        if verbose:
+            print "[+] Stop profiler"
+            print "[+] Process data..."
+        stat = loadStats(profile_filename)
+        if verbose:
+            print "[+] Strip..."
+        # Remove leading path information from file names in the report.
+        stat.strip_dirs()
+        if verbose:
+            print "[+] Sort data..."
+        stat.sort_stats(*sort_by)
+        if verbose:
+            print
+            print "[+] Display statistics"
+            print
+        stat.print_stats(nb_func)
+        return result
+    finally:
+        # Always remove the temporary profile data file.
+        unlink(profile_filename)
+
diff --git a/lib/hachoir_core/stream/__init__.py b/lib/hachoir_core/stream/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..163e12a3de702133a1f9dd4ab97f0e642a490555
--- /dev/null
+++ b/lib/hachoir_core/stream/__init__.py
@@ -0,0 +1,11 @@
+from hachoir_core.endian import BIG_ENDIAN, LITTLE_ENDIAN
+from hachoir_core.stream.stream import StreamError
+from hachoir_core.stream.input import (
+        InputStreamError,
+        InputStream, InputIOStream, StringInputStream,
+        InputSubStream, InputFieldStream,
+        FragmentedStream, ConcatStream)
+from hachoir_core.stream.input_helper import FileInputStream, guessStreamCharset
+from hachoir_core.stream.output import (OutputStreamError,
+        FileOutputStream, StringOutputStream, OutputStream)
+
diff --git a/lib/hachoir_core/stream/input.py b/lib/hachoir_core/stream/input.py
new file mode 100644
index 0000000000000000000000000000000000000000..ec2c27c1a2332c7ec19f4ed06e076a0773d8592d
--- /dev/null
+++ b/lib/hachoir_core/stream/input.py
@@ -0,0 +1,563 @@
+from hachoir_core.endian import BIG_ENDIAN, LITTLE_ENDIAN
+from hachoir_core.error import info
+from hachoir_core.log import Logger
+from hachoir_core.bits import str2long
+from hachoir_core.i18n import getTerminalCharset
+from hachoir_core.tools import lowerBound
+from hachoir_core.i18n import _
+from os import dup, fdopen
+from errno import ESPIPE
+from weakref import ref as weakref_ref
+from hachoir_core.stream import StreamError
+
+class InputStreamError(StreamError):
+    """Base class for errors raised while reading an input stream."""
+    pass
+
+class ReadStreamError(InputStreamError):
+    """Raised when 'size' bits can't be read at bit address 'address';
+    'got' (optional) is the number of bits actually read."""
+    def __init__(self, size, address, got=None):
+        self.size = size
+        self.address = address
+        self.got = got
+        if self.got is not None:
+            msg = _("Can't read %u bits at address %u (got %u bits)") % (self.size, self.address, self.got)
+        else:
+            msg = _("Can't read %u bits at address %u") % (self.size, self.address)
+        InputStreamError.__init__(self, msg)
+
+class NullStreamError(InputStreamError):
+    """Raised when a stream turns out to be empty (size of zero bits)."""
+    def __init__(self, source):
+        self.source = source
+        msg = _("Input size is nul (source='%s')!") % self.source
+        InputStreamError.__init__(self, msg)
+
+class FileFromInputStream:
+    """
+    File-like object (read/seek/tell, byte units) wrapping an
+    InputStream (bit units). Registers with the stream via askSize()
+    so the size is filled in lazily once it becomes known.
+    """
+    # Current byte position; when _from_end is True the offset is
+    # relative to the end of the stream (negative or zero).
+    _offset = 0
+    _from_end = False
+
+    def __init__(self, stream):
+        self.stream = stream
+        self._setSize(stream.askSize(self))
+
+    def _setSize(self, size):
+        # Convert a size in bits to bytes; None means still unknown.
+        if size is None:
+            self._size = size
+        elif size % 8:
+            raise InputStreamError("Invalid size")
+        else:
+            self._size = size // 8
+
+    def tell(self):
+        if self._from_end:
+            # Force the stream to be fed until its size is known, then
+            # convert the end-relative offset to an absolute one.
+            while self._size is None:
+                self.stream._feed(max(self.stream._current_size << 1, 1 << 16))
+            self._from_end = False
+            self._offset += self._size
+        return self._offset
+
+    def seek(self, pos, whence=0):
+        # Standard file semantics: 0=absolute, 1=relative, 2=from end.
+        if whence == 0:
+            self._from_end = False
+            self._offset = pos
+        elif whence == 1:
+            self._offset += pos
+        elif whence == 2:
+            self._from_end = True
+            self._offset = pos
+        else:
+            raise ValueError("seek() second argument must be 0, 1 or 2")
+
+    def read(self, size=None):
+        # Byte-aligned helper around the bit-level stream read.
+        def read(address, size):
+            shift, data, missing = self.stream.read(8 * address, 8 * size)
+            if shift:
+                raise InputStreamError("TODO: handle non-byte-aligned data")
+            return data
+        if self._size or size is not None and not self._from_end:
+            # We don't want self.tell() to read anything
+            # and the size must be known if we read until the end.
+            pos = self.tell()
+            if size is None or None < self._size < pos + size:
+                size = self._size - pos
+            if size <= 0:
+                return ''
+            data = read(pos, size)
+            self._offset += len(data)
+            return data
+        elif self._from_end:
+            # TODO: not tested
+            max_size = - self._offset
+            if size is None or max_size < size:
+                size = max_size
+            if size <= 0:
+                return ''
+            # Keep a two-chunk sliding window of the most recent data.
+            data = '', ''
+            self._offset = max(0, self.stream._current_size // 8 + self._offset)
+            self._from_end = False
+            bs = max(max_size, 1 << 16)
+            while True:
+                d = read(self._offset, bs)
+                data = data[1], d
+                self._offset += len(d)
+                if self._size:
+                    bs = self._size - self._offset
+                    if not bs:
+                        # End reached: return the requested slice of the
+                        # trailing max_size bytes.
+                        data = data[0] + data[1]
+                        d = len(data) - max_size
+                        return data[d:d+size]
+        else:
+            # TODO: not tested
+            # Size unknown, reading to the end: accumulate 64 KB chunks.
+            data = [ ]
+            size = 1 << 16
+            while True:
+                d = read(self._offset, size)
+                data.append(d)
+                self._offset += len(d)
+                if self._size:
+                    size = self._size - self._offset
+                    if not size:
+                        return ''.join(data)
+
+
+class InputStream(Logger):
+    """
+    Abstract base class for bit-addressable input streams. All sizes
+    and addresses are expressed in bits. Subclasses must implement
+    read(address, size).
+    """
+    _set_size = None
+    _current_size = 0
+
+    def __init__(self, source=None, size=None, packets=None, **args):
+        self.source = source
+        self._size = size   # in bits
+        if size == 0:
+            raise NullStreamError(source)
+        self.tags = tuple(args.get("tags", tuple()))
+        self.packets = packets
+
+    def askSize(self, client):
+        """
+        Return the stream size in bits, or None if not yet known. In the
+        latter case 'client' is stored as a weak reference and its
+        _setSize() is called once the size is discovered.
+        """
+        if self._size != self._current_size:
+            if self._set_size is None:
+                self._set_size = []
+            self._set_size.append(weakref_ref(client))
+        return self._size
+
+    def _setSize(self, size=None):
+        # Freeze the size at _current_size and notify registered clients.
+        assert self._size is None or self._current_size <= self._size
+        if self._size != self._current_size:
+            self._size = self._current_size
+            if not self._size:
+                raise NullStreamError(self.source)
+            if self._set_size:
+                for client in self._set_size:
+                    # Dereference the weakref; skip dead clients.
+                    client = client()
+                    if client:
+                        client._setSize(self._size)
+                del self._set_size
+
+    size = property(lambda self: self._size, doc="Size of the stream in bits")
+    checked = property(lambda self: self._size == self._current_size)
+
+    def sizeGe(self, size, const=False):
+        # True when at least 'size' bits are (or can be made) available;
+        # with const=True no additional data is fetched.
+        return self._current_size >= size or \
+            not (None < self._size < size or const or self._feed(size))
+
+    def _feed(self, size):
+        # Returns the 'missing' flag of a 1-bit read just before 'size'.
+        return self.read(size-1,1)[2]
+
+    def read(self, address, size):
+        """
+        Read 'size' bits at position 'address' (in bits)
+        from the beginning of the stream.
+        """
+        raise NotImplementedError
+
+    def readBits(self, address, nbits, endian):
+        """Read nbits at 'address' and return them as an unsigned integer."""
+        assert endian in (BIG_ENDIAN, LITTLE_ENDIAN)
+
+        shift, data, missing = self.read(address, nbits)
+        if missing:
+            raise ReadStreamError(nbits, address)
+        value = str2long(data, endian)
+        if endian is BIG_ENDIAN:
+            value >>= len(data) * 8 - shift - nbits
+        else:
+            value >>= shift
+        return value & (1 << nbits) - 1
+
+    def readInteger(self, address, signed, nbits, endian):
+        """ Read an integer number """
+        value = self.readBits(address, nbits, endian)
+
+        # Signed number. Example with nbits=8:
+        # if 128 <= value: value -= 256
+        if signed and (1 << (nbits-1)) <= value:
+            value -= (1 << nbits)
+        return value
+
+    def readBytes(self, address, nb_bytes):
+        """Read nb_bytes bytes at bit address 'address' (byte-aligned)."""
+        shift, data, missing = self.read(address, 8 * nb_bytes)
+        if shift:
+            raise InputStreamError("TODO: handle non-byte-aligned data")
+        if missing:
+            raise ReadStreamError(8 * nb_bytes, address)
+        return data
+
+    def searchBytesLength(self, needle, include_needle,
+    start_address=0, end_address=None):
+        """
+        Length in bytes from start_address up to the needle.
+        If include_needle is True, add its length to the result.
+        Returns None if needle can't be found.
+        """
+
+        pos = self.searchBytes(needle, start_address, end_address)
+        if pos is None:
+            return None
+        length = (pos - start_address) // 8
+        if include_needle:
+            length += len(needle)
+        return length
+
+    def searchBytes(self, needle, start_address=0, end_address=None):
+        """
+        Search some bytes in [start_address;end_address[. Addresses must
+        be aligned to byte. Returns the address of the bytes if found,
+        None else.
+        """
+        if start_address % 8:
+            raise InputStreamError("Unable to search bytes with address with bit granularity")
+        length = len(needle)
+        # Scan in chunks; len(needle)-1 trailing bytes are carried over
+        # between chunks so a needle spanning two chunks is still found.
+        size = max(3 * length, 4096)
+        buffer = ''
+
+        if self._size and (end_address is None or self._size < end_address):
+            end_address = self._size
+
+        while True:
+            if end_address is not None:
+                todo = (end_address - start_address) >> 3
+                if todo < size:
+                    if todo <= 0:
+                        return None
+                    size = todo
+            data = self.readBytes(start_address, size)
+            if end_address is None and self._size:
+                end_address = self._size
+                size = (end_address - start_address) >> 3
+                assert size > 0
+                data = data[:size]
+            start_address += 8 * size
+            buffer = buffer[len(buffer) - length + 1:] + data
+            found = buffer.find(needle)
+            if found >= 0:
+                return start_address + (found - len(buffer)) * 8
+
+    def file(self):
+        """Return a file-like (read/seek/tell) view of this stream."""
+        return FileFromInputStream(self)
+
+
+class InputPipe(object):
+    """
+    InputPipe makes input streams seekable by caching a certain
+    amount of data. The memory usage may be unlimited in worst cases.
+    A function (set_size) is called when the size of the stream is known.
+
+    InputPipe sees the input stream as an array of blocks of
+    size = (2 ^ self.buffer_size) and self.buffers maps to this array.
+    It also maintains a circular ordered list of non-discarded blocks,
+    sorted by access time.
+
+    Each element of self.buffers is an array of 3 elements:
+     * self.buffers[i][0] is the data.
+       len(self.buffers[i][0]) == 1 << self.buffer_size
+       (except at the end: the length may be smaller)
+     * self.buffers[i][1] is the index of a more recently used block
+     * self.buffers[i][2] is the opposite of self.buffers[i][1],
+       in order to have a double-linked list.
+    For any discarded block, self.buffers[i] = None
+
+    self.last is the index of the most recently accessed block.
+    self.first is the first (= smallest index) non-discarded block.
+
+    How InputPipe discards blocks:
+     * Just before returning from the read method.
+     * Only if there are more than self.buffer_nb_min blocks in memory.
+     * While self.buffers[self.first] is that least recently used block.
+
+    Property: There is no hole in self.buffers, except at the beginning.
+    """
+    buffer_nb_min = 256
+    buffer_size = 16
+    last = None
+    size = None
+
+    def __init__(self, input, set_size=None):
+        self._input = input
+        self.first = self.address = 0
+        self.buffers = []
+        self.set_size = set_size
+
+    current_size = property(lambda self: len(self.buffers) << self.buffer_size)
+
+    def _append(self, data):
+        # Append a new block and link it in as the most recently used.
+        if self.last is None:
+            self.last = next = prev = 0
+        else:
+            prev = self.last
+            last = self.buffers[prev]
+            next = last[1]
+            self.last = self.buffers[next][2] = last[1] = len(self.buffers)
+        self.buffers.append([ data, next, prev ])
+
+    def _get(self, index):
+        # Return block data and promote the block to most-recently-used.
+        if index >= len(self.buffers):
+            return ''
+        buf = self.buffers[index]
+        if buf is None:
+            raise InputStreamError(_("Error: Buffers too small. Can't seek backward."))
+        if self.last != index:
+            # Unlink the block, then re-insert it right after self.last.
+            next = buf[1]
+            prev = buf[2]
+            self.buffers[next][2] = prev
+            self.buffers[prev][1] = next
+            first = self.buffers[self.last][1]
+            buf[1] = first
+            buf[2] = self.last
+            self.buffers[first][2] = index
+            self.buffers[self.last][1] = index
+            self.last = index
+        return buf[0]
+
+    def _flush(self):
+        # Discard least-recently-used leading blocks while more than
+        # buffer_nb_min blocks are cached.
+        lim = len(self.buffers) - self.buffer_nb_min
+        while self.first < lim:
+            buf = self.buffers[self.first]
+            if buf[2] != self.last:
+                break
+            info("Discarding buffer %u." % self.first)
+            self.buffers[self.last][1] = buf[1]
+            self.buffers[buf[1]][2] = self.last
+            self.buffers[self.first] = None
+            self.first += 1
+
+    def seek(self, address):
+        # Only moves the logical read position; no data is fetched.
+        assert 0 <= address
+        self.address = address
+
+    def read(self, size):
+        # Fetch blocks from the input until the requested range is
+        # cached, then assemble the result from the cached blocks.
+        end = self.address + size
+        for i in xrange(len(self.buffers), (end >> self.buffer_size) + 1):
+            data = self._input.read(1 << self.buffer_size)
+            if len(data) < 1 << self.buffer_size:
+                # Short read: end of input — total size is now known.
+                self.size = (len(self.buffers) << self.buffer_size) + len(data)
+                if self.set_size:
+                    self.set_size(self.size)
+                if data:
+                    self._append(data)
+                break
+            self._append(data)
+        block, offset = divmod(self.address, 1 << self.buffer_size)
+        data = ''.join(self._get(index)
+                for index in xrange(block, (end - 1 >> self.buffer_size) + 1)
+            )[offset:offset+size]
+        self._flush()
+        self.address += len(data)
+        return data
+
+class InputIOStream(InputStream):
+    """
+    InputStream backed by a file-like object. Non-seekable inputs are
+    wrapped in an InputPipe so random access still works.
+    """
+    def __init__(self, input, size=None, **args):
+        if not hasattr(input, "seek"):
+            if size is None:
+                input = InputPipe(input, self._setSize)
+            else:
+                input = InputPipe(input)
+        elif size is None:
+            try:
+                # Seek to the end to discover the size (converted to bits).
+                input.seek(0, 2)
+                size = input.tell() * 8
+            except IOError, err:
+                if err.errno == ESPIPE:
+                    # Seekable API but actually a pipe: use InputPipe.
+                    input = InputPipe(input, self._setSize)
+                else:
+                    charset = getTerminalCharset()
+                    errmsg = unicode(str(err), charset)
+                    source = args.get("source", "<inputio:%r>" % input)
+                    raise InputStreamError(_("Unable to get size of %s: %s") % (source, errmsg))
+        self._input = input
+        InputStream.__init__(self, size=size, **args)
+
+    def __current_size(self):
+        if self._size:
+            return self._size
+        if self._input.size:
+            return 8 * self._input.size
+        return 8 * self._input.current_size
+    _current_size = property(__current_size)
+
+    def read(self, address, size):
+        # Convert the bit request into a byte read; 'shift' is the bit
+        # offset of the requested data inside the first returned byte.
+        assert size > 0
+        _size = self._size
+        address, shift = divmod(address, 8)
+        self._input.seek(address)
+        size = (size + shift + 7) >> 3
+        data = self._input.read(size)
+        got = len(data)
+        missing = size != got
+        if missing and _size == self._size:
+            raise ReadStreamError(8 * size, 8 * address, 8 * got)
+        return shift, data, missing
+
+    def file(self):
+        """Return an independent file object: a dup of the underlying
+        file descriptor when available, else a FileFromInputStream."""
+        if hasattr(self._input, "fileno"):
+            new_fd = dup(self._input.fileno())
+            new_file = fdopen(new_fd, "r")
+            new_file.seek(0)
+            return new_file
+        return InputStream.file(self)
+
+
+class StringInputStream(InputStream):
+    """
+    InputStream over an in-memory byte string; the size is known
+    immediately, so the stream is fully 'checked' from the start.
+    """
+    def __init__(self, data, source="<string>", **args):
+        self.data = data
+        InputStream.__init__(self, source=source, size=8*len(data), **args)
+        self._current_size = self._size
+
+    def read(self, address, size):
+        # Return (bit shift, bytes, missing): slice just enough bytes to
+        # cover the requested bit range.
+        address, shift = divmod(address, 8)
+        size = (size + shift + 7) >> 3
+        data = self.data[address:address+size]
+        got = len(data)
+        if got != size:
+            raise ReadStreamError(8 * size, 8 * address, 8 * got)
+        return shift, data, False
+
+
+class InputSubStream(InputStream):
+    """
+    View of the bit range [offset; offset+size[ of another stream;
+    addresses are relative to the substream start.
+    """
+    def __init__(self, stream, offset, size=None, source=None, **args):
+        if offset is None:
+            offset = 0
+        if size is None and stream.size is not None:
+            size = stream.size - offset
+        if None < size <= 0:
+            raise ValueError("InputSubStream: offset is outside input stream")
+        self.stream = stream
+        self._offset = offset
+        if source is None:
+            source = "<substream input=%s offset=%s size=%s>" % (stream.source, offset, size)
+        InputStream.__init__(self, source=source, size=size, **args)
+        # Register for size notification if the parent size is unknown.
+        self.stream.askSize(self)
+
+    _current_size = property(lambda self: min(self._size, max(0, self.stream._current_size - self._offset)))
+
+    def read(self, address, size):
+        # Delegate to the parent stream, shifted by our offset.
+        return self.stream.read(self._offset + address, size)
+
+def InputFieldStream(field, **args):
+    """
+    Build a substream exposing just the bits of 'field' inside its
+    parent's stream (a root field's own stream is returned as-is).
+    """
+    if not field.parent:
+        return field.stream
+    stream = field.parent.stream
+    args["size"] = field.size
+    args.setdefault("source", stream.source + field.path)
+    return InputSubStream(stream, field.absolute_address, **args)
+
+
+class FragmentedStream(InputStream):
+    """
+    Present a chain of fragment fields (linked through field.next) as
+    one continuous stream. Fragments are discovered lazily in _feed().
+    """
+    def __init__(self, field, **args):
+        self.stream = field.parent.stream
+        data = field.getData()
+        # fragments: list of (logical address, address in parent stream,
+        # size), all in bits, in ascending logical order.
+        self.fragments = [ (0, data.absolute_address, data.size) ]
+        self.next = field.next
+        args.setdefault("source", "%s%s" % (self.stream.source, field.path))
+        InputStream.__init__(self, **args)
+        if not self.next:
+            # Single fragment: the total size is already known.
+            self._current_size = data.size
+            self._setSize()
+
+    def _feed(self, end):
+        # Extend the fragment map until logical address 'end' is covered.
+        # Returns True when the stream ends before 'end' (data missing).
+        if self._current_size < end:
+            if self.checked:
+                raise ReadStreamError(end - self._size, self._size)
+            a, fa, fs = self.fragments[-1]
+            while self.stream.sizeGe(fa + min(fs, end - a)):
+                a += fs
+                f = self.next
+                if a >= end:
+                    self._current_size = end
+                    if a == end and not f:
+                        self._setSize()
+                    return False
+                if f:
+                    self.next = f.next
+                    f = f.getData()
+                if not f:
+                    # No more fragments: the stream ends here.
+                    self._current_size = a
+                    self._setSize()
+                    return True
+                fa = f.absolute_address
+                fs = f.size
+                self.fragments += [ (a, fa, fs) ]
+            # Parent stream ended inside the current fragment.
+            self._current_size = a + max(0, self.stream.size - fa)
+            self._setSize()
+            return True
+        return False
+
+    def read(self, address, size):
+        assert size > 0
+        missing = self._feed(address + size)
+        if missing:
+            # Truncate the request to the data actually available.
+            size = self._size - address
+            if size <= 0:
+                return 0, '', True
+        d = []
+        # Locate the fragment containing 'address'.
+        i = lowerBound(self.fragments, lambda x: x[0] <= address)
+        a, fa, fs = self.fragments[i-1]
+        a -= address
+        fa -= a
+        fs += a
+        s = None
+        while True:
+            n = min(fs, size)
+            u, v, w = self.stream.read(fa, n)
+            assert not w
+            if s is None:
+                # Bit shift of the first chunk; later chunks must align.
+                s = u
+            else:
+                assert not u
+            d += [ v ]
+            size -= n
+            if not size:
+                return s, ''.join(d), missing
+            a, fa, fs = self.fragments[i]
+            i += 1
+
+
+class ConcatStream(InputStream):
+    """
+    Concatenation of exactly two input streams; the first stream's
+    size must already be known (checked).
+    """
+    # TODO: concatene any number of any type of stream
+    def __init__(self, streams, **args):
+        if len(streams) > 2 or not streams[0].checked:
+            raise NotImplementedError
+        self.__size0 = streams[0].size
+        size1 = streams[1].askSize(self)
+        if size1 is not None:
+            args["size"] = self.__size0 + size1
+        self.__streams = streams
+        InputStream.__init__(self, **args)
+
+    _current_size = property(lambda self: self.__size0 + self.__streams[1]._current_size)
+
+    def read(self, address, size):
+        # Split the request between the two underlying streams.
+        _size = self._size
+        s = self.__size0 - address
+        shift, data, missing = None, '', False
+        if s > 0:
+            # Part (or all) of the request lies in the first stream.
+            s = min(size, s)
+            shift, data, w = self.__streams[0].read(address, s)
+            assert not w
+            a, s = 0, size - s
+        else:
+            a, s = -s, size
+        if s:
+            # Remainder comes from the second stream.
+            u, v, missing = self.__streams[1].read(a, s)
+            if missing and _size == self._size:
+                raise ReadStreamError(s, a)
+            if shift is None:
+                shift = u
+            else:
+                assert not u
+            data += v
+        return shift, data, missing
diff --git a/lib/hachoir_core/stream/input_helper.py b/lib/hachoir_core/stream/input_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..e793831029f6f6508ab749e772530e6994b1d929
--- /dev/null
+++ b/lib/hachoir_core/stream/input_helper.py
@@ -0,0 +1,38 @@
+from hachoir_core.i18n import getTerminalCharset, guessBytesCharset, _
+from hachoir_core.stream import InputIOStream, InputSubStream, InputStreamError
+
+def FileInputStream(filename, real_filename=None, **args):
+    """
+    Create an input stream of a file. filename must be unicode.
+
+    real_filename is an optional argument used to specify the real filename,
+    its type can be 'str' or 'unicode'. Use real_filename when you are
+    not able to convert filename to real unicode string (ie. you have to
+    use unicode(name, 'replace') or unicode(name, 'ignore')).
+
+    Optional keyword arguments 'offset' and 'size' (in bytes) restrict
+    the stream to a slice of the file.
+    """
+    assert isinstance(filename, unicode)
+    if not real_filename:
+        real_filename = filename
+    try:
+        inputio = open(real_filename, 'rb')
+    except IOError, err:
+        charset = getTerminalCharset()
+        errmsg = unicode(str(err), charset)
+        raise InputStreamError(_("Unable to open file %s: %s") % (filename, errmsg))
+    source = "file:" + filename
+    offset = args.pop("offset", 0)
+    size = args.pop("size", None)
+    if offset or size:
+        # Wrap the raw stream in a substream over the requested slice.
+        if size:
+            size = 8 * size
+        stream = InputIOStream(inputio, source=source, **args)
+        return InputSubStream(stream, 8 * offset, size, **args)
+    else:
+        # NOTE(review): assumes a caller-provided 'tags' argument is a
+        # mutable list (append is called on it) — confirm with callers.
+        args.setdefault("tags",[]).append(("filename", filename))
+        return InputIOStream(inputio, source=source, **args)
+
+def guessStreamCharset(stream, address, size, default=None):
+    """
+    Guess the character set of 'size' bits at bit address 'address';
+    at most 1 KB of data is examined. The 'default' argument is
+    forwarded to guessBytesCharset().
+    """
+    size = min(size, 1024*8)
+    bytes = stream.readBytes(address, size//8)
+    return guessBytesCharset(bytes, default)
+
diff --git a/lib/hachoir_core/stream/output.py b/lib/hachoir_core/stream/output.py
new file mode 100644
index 0000000000000000000000000000000000000000..e31637d699a9b5993f53159a63c07b13d9c3ba3f
--- /dev/null
+++ b/lib/hachoir_core/stream/output.py
@@ -0,0 +1,173 @@
+from cStringIO import StringIO
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_core.bits import long2raw
+from hachoir_core.stream import StreamError
+from errno import EBADF
+
+MAX_READ_NBYTES = 2 ** 16
+
+class OutputStreamError(StreamError):
+    pass
+
+class OutputStream(object):
+    """
+    Bit- and byte-oriented output stream wrapping a file-like object.
+
+    Bits are accumulated into a one-byte buffer (self._byte) until a full
+    byte is ready to be written; self._bit_pos counts the pending bits
+    (0..7).
+    """
+    def __init__(self, output, filename=None):
+        self._output = output       # file-like object (must support write())
+        self._filename = filename   # optional unicode filename
+        self._bit_pos = 0           # number of bits pending in self._byte
+        self._byte = 0              # partial byte being assembled
+
+    def _getFilename(self):
+        return self._filename
+    filename = property(_getFilename)
+
+    def writeBit(self, state, endian):
+        # Append one bit; flush the buffered byte once 8 bits are pending.
+        # NOTE(review): for BIG_ENDIAN this fills the byte starting at the
+        # low bit, whereas writeBits() fills starting at the high bit —
+        # confirm which ordering callers rely on before mixing both APIs.
+        if self._bit_pos == 7:
+            self._bit_pos = 0
+            if state:
+                if endian is BIG_ENDIAN:
+                    self._byte |= 1
+                else:
+                    self._byte |= 128
+            self._output.write(chr(self._byte))
+            self._byte = 0
+        else:
+            if state:
+                if endian is BIG_ENDIAN:
+                    self._byte |= (1 << self._bit_pos)
+                else:
+                    self._byte |= (1 << (7-self._bit_pos))
+            self._bit_pos += 1
+
+    def writeBits(self, count, value, endian):
+        # Write 'count' bits of 'value' (0 <= value < 2**count).
+        assert 0 <= value < 2**count
+
+        # Feed bits to align to byte address
+        if self._bit_pos != 0:
+            n = 8 - self._bit_pos
+            if n <= count:
+                # Enough bits to complete the pending byte: flush it.
+                count -= n
+                if endian is BIG_ENDIAN:
+                    self._byte |= (value >> count)
+                    value &= ((1 << count) - 1)
+                else:
+                    self._byte |= (value & ((1 << n)-1)) << self._bit_pos
+                    value >>= n
+                self._output.write(chr(self._byte))
+                self._bit_pos = 0
+                self._byte = 0
+            else:
+                # All bits fit in the pending byte: buffer and return.
+                if endian is BIG_ENDIAN:
+                    self._byte |= (value << (8-self._bit_pos-count))
+                else:
+                    self._byte |= (value << self._bit_pos)
+                self._bit_pos += count
+                return
+
+        # Write byte per byte
+        while 8 <= count:
+            count -= 8
+            if endian is BIG_ENDIAN:
+                byte = (value >> count)
+                value &= ((1 << count) - 1)
+            else:
+                byte = (value & 0xFF)
+                value >>= 8
+            self._output.write(chr(byte))
+
+        # Keep last bits
+        assert 0 <= count < 8
+        self._bit_pos = count
+        if 0 < count:
+            assert 0 <= value < 2**count
+            if endian is BIG_ENDIAN:
+                self._byte = value << (8-count)
+            else:
+                self._byte = value
+        else:
+            assert value == 0
+            self._byte = 0
+
+    def writeInteger(self, value, signed, size_byte, endian):
+        # NOTE(review): adding the bias to every value produces an
+        # excess-2^(n-1) encoding, not two's complement — confirm that this
+        # matches how the matching reader decodes signed integers.
+        if signed:
+            value += 1 << (size_byte*8 - 1)
+        raw = long2raw(value, endian, size_byte)
+        self.writeBytes(raw)
+
+    def copyBitsFrom(self, input, address, nb_bits, endian):
+        # Copy nb_bits bits from the input stream starting at 'address'
+        # (bit address). Whole bytes take the fast byte-copy path.
+        if (nb_bits % 8) == 0:
+            self.copyBytesFrom(input, address, nb_bits/8)
+        else:
+            # Arbitrary limit (because we should use a buffer, like copyBytesFrom(),
+            # but with endianness problems)
+            assert nb_bits <= 128
+            data = input.readBits(address, nb_bits, endian)
+            self.writeBits(nb_bits, data, endian)
+
+    def copyBytesFrom(self, input, address, nb_bytes):
+        # Copy nb_bytes bytes from the input stream; address is in bits and
+        # must be byte-aligned.
+        if (address % 8):
+            raise OutputStreamError("Unable to copy bytes with address with bit granularity")
+        buffer_size = 1 << 12   # 8192 (8 KB)
+        while 0 < nb_bytes:
+            # Compute buffer size
+            if nb_bytes < buffer_size:
+                buffer_size = nb_bytes
+
+            # Read
+            data = input.readBytes(address, buffer_size)
+
+            # Write
+            self.writeBytes(data)
+
+            # Move address
+            address += buffer_size*8
+            nb_bytes -= buffer_size
+
+    def writeBytes(self, bytes):
+        # Byte-aligned write only: partial pending bits are not supported.
+        if self._bit_pos != 0:
+            raise NotImplementedError()
+        self._output.write(bytes)
+
+    def readBytes(self, address, nbytes):
+        """
+        Read bytes from the stream at specified address (in bits).
+        Address has to be a multiple of 8.
+        nbytes has to be in 1..MAX_READ_NBYTES (64 KB).
+
+        This method is only supported for StringOutputStream (not on
+        FileOutputStream).
+
+        Return read bytes as byte string.
+        """
+        assert (address % 8) == 0
+        assert (1 <= nbytes <= MAX_READ_NBYTES)
+        self._output.flush()
+        oldpos = self._output.tell()
+        try:
+            self._output.seek(0)
+            try:
+                return self._output.read(nbytes)
+            except IOError, err:
+                # NOTE(review): IOErrors other than EBADF are swallowed here
+                # and None is returned — consider re-raising them.
+                if err[0] == EBADF:
+                    raise OutputStreamError("Stream doesn't support read() operation")
+        finally:
+            self._output.seek(oldpos)
+
+def StringOutputStream():
+    """
+    Create an output stream into a string (in-memory cStringIO buffer).
+    Unlike FileOutputStream, the result supports readBytes().
+    """
+    data = StringIO()
+    return OutputStream(data)
+
+def FileOutputStream(filename, real_filename=None):
+    """
+    Create an output stream into file with given name.
+
+    Filename has to be unicode, whereas (optional) real_filename can be str.
+    The file is opened in binary write mode ('wb'), truncating any
+    existing content.
+    """
+    assert isinstance(filename, unicode)
+    if not real_filename:
+        real_filename = filename
+    output = open(real_filename, 'wb')
+    return OutputStream(output, filename=filename)
+
diff --git a/lib/hachoir_core/stream/stream.py b/lib/hachoir_core/stream/stream.py
new file mode 100644
index 0000000000000000000000000000000000000000..58c9aea83440aaa005adb75c7d9f18a618fb3dad
--- /dev/null
+++ b/lib/hachoir_core/stream/stream.py
@@ -0,0 +1,5 @@
+from hachoir_core.error import HachoirError
+
+class StreamError(HachoirError):
+    """Base class of all input/output stream errors."""
+    pass
+
diff --git a/lib/hachoir_core/text_handler.py b/lib/hachoir_core/text_handler.py
new file mode 100644
index 0000000000000000000000000000000000000000..e2c65f0a00ddf241e0dcefdd1384cc5558c3b7dd
--- /dev/null
+++ b/lib/hachoir_core/text_handler.py
@@ -0,0 +1,60 @@
+"""
+Utilities used to convert a field to a classic human representation of data.
+"""
+
+from hachoir_core.tools import (
+    humanDuration, humanFilesize, alignValue,
+    durationWin64 as doDurationWin64,
+    deprecated)
+from types import FunctionType, MethodType
+from hachoir_core.field import Field
+
+def textHandler(field, handler):
+    """
+    Attach 'handler' as the field's display generator: the field's display
+    is computed as handler(field). Returns the field (for chaining).
+    """
+    assert isinstance(handler, (FunctionType, MethodType))
+    assert issubclass(field.__class__, Field)
+    field.createDisplay = lambda: handler(field)
+    return field
+
+def displayHandler(field, handler):
+    """
+    Like textHandler(), but the handler receives the field *value*:
+    the field's display is computed as handler(field.value).
+    """
+    assert isinstance(handler, (FunctionType, MethodType))
+    assert issubclass(field.__class__, Field)
+    field.createDisplay = lambda: handler(field.value)
+    return field
+
+@deprecated("Use TimedeltaWin64 field type")
+def durationWin64(field):
+    """
+    Convert Windows 64-bit duration to string. The timestamp format is
+    a 64-bit number: number of 100ns. See also timestampWin64().
+
+    Deprecated: kept only for backward compatibility.
+
+    >>> durationWin64(type("", (), dict(value=2146280000, size=64)))
+    u'3 min 34 sec 628 ms'
+    >>> durationWin64(type("", (), dict(value=(1 << 64)-1, size=64)))
+    u'58494 years 88 days 5 hours'
+    """
+    assert hasattr(field, "value") and hasattr(field, "size")
+    assert field.size == 64
+    delta = doDurationWin64(field.value)
+    return humanDuration(delta)
+
+def filesizeHandler(field):
+    """
+    Format field value using humanFilesize().
+    Returns the field itself (see displayHandler()).
+    """
+    return displayHandler(field, humanFilesize)
+
+def hexadecimal(field):
+    """
+    Convert an integer to hexadecimal in lower case. Returns unicode string.
+
+    >>> hexadecimal(type("", (), dict(value=412, size=16)))
+    u'0x019c'
+    >>> hexadecimal(type("", (), dict(value=0, size=32)))
+    u'0x00000000'
+    """
+    assert hasattr(field, "value") and hasattr(field, "size")
+    size = field.size
+    # One hex digit per 4 bits, rounding the field size up.
+    padding = alignValue(size, 4) // 4
+    pattern = u"0x%%0%ux" % padding
+    return pattern % field.value
+
diff --git a/lib/hachoir_core/timeout.py b/lib/hachoir_core/timeout.py
new file mode 100644
index 0000000000000000000000000000000000000000..d321419505e6cf445745452cbf95008c9b308d99
--- /dev/null
+++ b/lib/hachoir_core/timeout.py
@@ -0,0 +1,76 @@
+"""
+limitedTime(): set a timeout in seconds when calling a function,
+raise a Timeout error if the time limit is exceeded.
+"""
+from math import ceil
+
+IMPLEMENTATION = None
+
+class Timeout(RuntimeError):
+    """
+    Timeout error, inherits from RuntimeError.
+    """
+    pass
+
+def signalHandler(signum, frame):
+    """
+    Signal handler to catch timeout signal: raise Timeout exception.
+    """
+    raise Timeout("Timeout exceed!")
+
+def limitedTime(second, func, *args, **kw):
+    """
+    Call func(*args, **kw) with a timeout of second seconds.
+
+    Fallback implementation: no timeout is actually enforced here. It is
+    replaced below when signal.alarm() or resource.setrlimit() is
+    available on this platform.
+    """
+    return func(*args, **kw)
+
+def fixTimeout(second):
+    """
+    Fix timeout value: convert to integer with a minimum of 1 second.
+    """
+    if isinstance(second, float):
+        second = int(ceil(second))
+    assert isinstance(second, (int, long))
+    return max(second, 1)
+
+# Pick the best available timeout implementation for this platform:
+# prefer signal.alarm(), fall back to resource.setrlimit(RLIMIT_CPU),
+# otherwise keep the no-op limitedTime() defined above.
+if not IMPLEMENTATION:
+    try:
+        from signal import signal, alarm, SIGALRM
+
+        # signal.alarm() implementation
+        def limitedTime(second, func, *args, **kw):
+            second = fixTimeout(second)
+            old_alarm = signal(SIGALRM, signalHandler)
+            try:
+                alarm(second)
+                return func(*args, **kw)
+            finally:
+                # Always cancel the alarm and restore the previous handler.
+                alarm(0)
+                signal(SIGALRM, old_alarm)
+
+        IMPLEMENTATION = "signal.alarm()"
+    except ImportError:
+        pass
+
+if not IMPLEMENTATION:
+    try:
+        from signal import signal, SIGXCPU
+        from resource import getrlimit, setrlimit, RLIMIT_CPU
+
+        # resource.setrlimit(RLIMIT_CPU) implementation
+        # "Bug": timeout is 'CPU' time so sleep() are not part of the timeout
+        def limitedTime(second, func, *args, **kw):
+            second = fixTimeout(second)
+            old_alarm = signal(SIGXCPU, signalHandler)
+            current = getrlimit(RLIMIT_CPU)
+            try:
+                # Lower the soft CPU limit; keep the hard limit unchanged.
+                setrlimit(RLIMIT_CPU, (second, current[1]))
+                return func(*args, **kw)
+            finally:
+                setrlimit(RLIMIT_CPU, current)
+                signal(SIGXCPU, old_alarm)
+
+        IMPLEMENTATION = "resource.setrlimit(RLIMIT_CPU)"
+    except ImportError:
+        pass
+
diff --git a/lib/hachoir_core/tools.py b/lib/hachoir_core/tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..30fa327ab87717aa0e8a2b1d666de94f486c7ccb
--- /dev/null
+++ b/lib/hachoir_core/tools.py
@@ -0,0 +1,582 @@
+# -*- coding: utf-8 -*-
+
+"""
+Various utilities.
+"""
+
+from hachoir_core.i18n import _, ngettext
+import re
+import stat
+from datetime import datetime, timedelta, MAXYEAR
+from warnings import warn
+
+def deprecated(comment=None):
+    """
+    This is a decorator which can be used to mark functions
+    as deprecated. It will result in a warning being emitted
+    when the function is used.
+
+    Examples: ::
+
+       @deprecated
+       def oldfunc(): ...
+
+       @deprecated("use newfunc()!")
+       def oldfunc2(): ...
+
+    NOTE(review): the bare ``@deprecated`` form shown above does not work
+    with this implementation (the function would be passed as 'comment');
+    always call it, e.g. ``@deprecated()``.
+
+    Code from: http://code.activestate.com/recipes/391367/
+    """
+    def _deprecated(func):
+        def newFunc(*args, **kwargs):
+            message = "Call to deprecated function %s" % func.__name__
+            if comment:
+                message += ": " + comment
+            warn(message, category=DeprecationWarning, stacklevel=2)
+            return func(*args, **kwargs)
+        # Manual equivalent of functools.wraps(): keep the wrapped
+        # function's name, docstring and attributes.
+        newFunc.__name__ = func.__name__
+        newFunc.__doc__ = func.__doc__
+        newFunc.__dict__.update(func.__dict__)
+        return newFunc
+    return _deprecated
+
+def paddingSize(value, align):
+    """
+    Compute size of a padding field. 'align' must be a positive integer.
+
+    >>> paddingSize(31, 4)
+    1
+    >>> paddingSize(32, 4)
+    0
+    >>> paddingSize(33, 4)
+    3
+
+    Note: (value + paddingSize(value, align)) == alignValue(value, align)
+    """
+    if value % align != 0:
+        return align - (value % align)
+    else:
+        return 0
+
+def alignValue(value, align):
+    """
+    Align a value to the next 'align' multiple (value itself if already
+    aligned). 'align' must be a positive integer.
+
+    >>> alignValue(31, 4)
+    32
+    >>> alignValue(32, 4)
+    32
+    >>> alignValue(33, 4)
+    36
+
+    Note: alignValue(value, align) == (value + paddingSize(value, align))
+    """
+
+    if value % align != 0:
+        return value + align - (value % align)
+    else:
+        return value
+
+def timedelta2seconds(delta):
+    """
+    Convert a datetime.timedelta() object to a number of seconds
+    (floating point number).
+
+    >>> timedelta2seconds(timedelta(seconds=2, microseconds=40000))
+    2.04
+    >>> timedelta2seconds(timedelta(minutes=1, milliseconds=250))
+    60.25
+    """
+    return delta.microseconds / 1000000.0 \
+        + delta.seconds + delta.days * 60*60*24
+
+def humanDurationNanosec(nsec):
+    """
+    Convert a duration in nanoseconds to a human natural representation.
+    Returns a unicode string. Durations of 1 second or more are delegated
+    to humanDuration().
+
+    >>> humanDurationNanosec(60417893)
+    u'60.42 ms'
+    """
+
+    # Nano second
+    if nsec < 1000:
+        return u"%u nsec" % nsec
+
+    # Micro seconds
+    usec, nsec = divmod(nsec, 1000)
+    if usec < 1000:
+        return u"%.2f usec" % (usec+float(nsec)/1000)
+
+    # Milli seconds
+    msec, usec = divmod(usec, 1000)
+    if msec < 1000:
+        return u"%.2f ms" % (msec + float(usec)/1000)
+    return humanDuration(msec)
+
+def humanDuration(delta):
+    """
+    Convert a duration in milliseconds (or a datetime.timedelta) to a
+    human natural representation. Returns a unicode string showing at
+    most the three most significant units.
+
+    >>> humanDuration(0)
+    u'0 ms'
+    >>> humanDuration(213)
+    u'213 ms'
+    >>> humanDuration(4213)
+    u'4 sec 213 ms'
+    >>> humanDuration(6402309)
+    u'1 hour 46 min 42 sec'
+    """
+    if not isinstance(delta, timedelta):
+        delta = timedelta(microseconds=delta*1000)
+
+    # Milliseconds
+    text = []
+    if 1000 <= delta.microseconds:
+        text.append(u"%u ms" % (delta.microseconds//1000))
+
+    # Seconds
+    minutes, seconds = divmod(delta.seconds, 60)
+    hours, minutes = divmod(minutes, 60)
+    if seconds:
+        text.append(u"%u sec" % seconds)
+    if minutes:
+        text.append(u"%u min" % minutes)
+    if hours:
+        text.append(ngettext("%u hour", "%u hours", hours) % hours)
+
+    # Days
+    years, days = divmod(delta.days, 365)
+    if days:
+        text.append(ngettext("%u day", "%u days", days) % days)
+    if years:
+        text.append(ngettext("%u year", "%u years", years) % years)
+    # Units were appended smallest first: keep the 3 most significant,
+    # then reverse so the biggest unit comes first.
+    if 3 < len(text):
+        text = text[-3:]
+    elif not text:
+        return u"0 ms"
+    return u" ".join(reversed(text))
+
+def humanFilesize(size):
+    """
+    Convert a file size in bytes to a human natural representation.
+    It uses the values: 1 KB is 1024 bytes, 1 MB is 1024 KB, etc.
+    Sizes below 10000 bytes are shown in plain bytes.
+    The result is a unicode string.
+
+    >>> humanFilesize(1)
+    u'1 byte'
+    >>> humanFilesize(790)
+    u'790 bytes'
+    >>> humanFilesize(256960)
+    u'250.9 KB'
+    """
+    if size < 10000:
+        return ngettext("%u byte", "%u bytes", size) % size
+    units = [_("KB"), _("MB"), _("GB"), _("TB")]
+    size = float(size)
+    divisor = 1024
+    for unit in units:
+        size = size / divisor
+        if size < divisor:
+            return "%.1f %s" % (size, unit)
+    # Fallback above the TB range: keep the last unit.
+    return "%u %s" % (size, unit)
+
+def humanBitSize(size):
+    """
+    Convert a size in bits to a classic human representation.
+    It uses the values: 1 Kbit is 1000 bits, 1 Mbit is 1000 Kbit, etc.
+    The result is a unicode string.
+
+    >>> humanBitSize(1)
+    u'1 bit'
+    >>> humanBitSize(790)
+    u'790 bits'
+    >>> humanBitSize(256960)
+    u'257.0 Kbit'
+    """
+    divisor = 1000
+    if size < divisor:
+        return ngettext("%u bit", "%u bits", size) % size
+    units = [u"Kbit", u"Mbit", u"Gbit", u"Tbit"]
+    size = float(size)
+    for unit in units:
+        size = size / divisor
+        if size < divisor:
+            return "%.1f %s" % (size, unit)
+    # Fallback above the Tbit range: keep the last unit.
+    return u"%u %s" % (size, unit)
+
+def humanBitRate(size):
+    """
+    Convert a bit rate to a classic human representation. It uses
+    humanBitSize() to convert size into a human representation.
+    The result is a unicode string.
+
+    >>> humanBitRate(790)
+    u'790 bits/sec'
+    >>> humanBitRate(256960)
+    u'257.0 Kbit/sec'
+    """
+    return "".join((humanBitSize(size), "/sec"))
+
+def humanFrequency(hertz):
+    """
+    Convert a frequency in hertz to a classic human representation.
+    It uses the values: 1 kHz is 1000 Hz, 1 MHz is 1000 kHz, etc.
+    The result is a unicode string.
+
+    >>> humanFrequency(790)
+    u'790 Hz'
+    >>> humanFrequency(629469)
+    u'629.5 kHz'
+    """
+    divisor = 1000
+    if hertz < divisor:
+        return u"%u Hz" % hertz
+    units = [u"kHz", u"MHz", u"GHz", u"THz"]
+    hertz = float(hertz)
+    for unit in units:
+        hertz = hertz / divisor
+        if hertz < divisor:
+            return u"%.1f %s" % (hertz, unit)
+    # Fallback above the THz range: keep the last unit.
+    return u"%s %s" % (hertz, unit)
+
+# Regex matching ASCII control codes, and a 128-entry tuple mapping each
+# ASCII code to its printable escape sequence (used by makePrintable()
+# and makeUnicode()).
+regex_control_code = re.compile(r"([\x00-\x1f\x7f])")
+controlchars = tuple({
+        # Don't use "\0", because "\0"+"0"+"1" = "\001" = "\1" (1 character)
+        # Same reason to not use octal syntax ("\1")
+        ord("\n"): r"\n",
+        ord("\r"): r"\r",
+        ord("\t"): r"\t",
+        ord("\a"): r"\a",
+        ord("\b"): r"\b",
+    }.get(code, '\\x%02x' % code)
+    for code in xrange(128)
+)
+
+def makePrintable(data, charset, quote=None, to_unicode=False, smart=True):
+    r"""
+    Prepare a string to make it printable in the specified charset.
+    It escapes control characters. Characters with code bigger than 127
+    are escaped if data type is 'str' or if charset is "ASCII".
+
+    Examples with Unicode:
+    >>> aged = unicode("âgé", "UTF-8")
+    >>> repr(aged)  # text type is 'unicode'
+    "u'\\xe2g\\xe9'"
+    >>> makePrintable("abc\0", "UTF-8")
+    'abc\\0'
+    >>> makePrintable(aged, "latin1")
+    '\xe2g\xe9'
+    >>> makePrintable(aged, "latin1", quote='"')
+    '"\xe2g\xe9"'
+
+    Examples with string encoded in latin1:
+    >>> aged_latin = unicode("âgé", "UTF-8").encode("latin1")
+    >>> repr(aged_latin)  # text type is 'str'
+    "'\\xe2g\\xe9'"
+    >>> makePrintable(aged_latin, "latin1")
+    '\\xe2g\\xe9'
+    >>> makePrintable("", "latin1")
+    ''
+    >>> makePrintable("a", "latin1", quote='"')
+    '"a"'
+    >>> makePrintable("", "latin1", quote='"')
+    '(empty)'
+    >>> makePrintable("abc", "latin1", quote="'")
+    "'abc'"
+
+    Control codes:
+    >>> makePrintable("\0\x03\x0a\x10 \x7f", "latin1")
+    '\\0\\3\\n\\x10 \\x7f'
+
+    Quote character may also be escaped (only ' and "):
+    >>> print makePrintable("a\"b", "latin-1", quote='"')
+    "a\"b"
+    >>> print makePrintable("a\"b", "latin-1", quote="'")
+    'a"b'
+    >>> print makePrintable("a'b", "latin-1", quote="'")
+    'a\'b'
+    """
+
+    if data:
+        if not isinstance(data, unicode):
+            # Byte string: decode as latin1 (lossless) and force ASCII
+            # output so that bytes > 127 get escaped below.
+            data = unicode(data, "ISO-8859-1")
+            charset = "ASCII"
+        data = regex_control_code.sub(
+            lambda regs: controlchars[ord(regs.group(1))], data)
+        if quote:
+            if quote in "\"'":
+                data = data.replace(quote, '\\' + quote)
+            data = ''.join((quote, data, quote))
+    elif quote:
+        data = "(empty)"
+    # backslashreplace escapes characters not representable in charset.
+    data = data.encode(charset, "backslashreplace")
+    if smart:
+        # Replace \x00\x01 by \0\1
+        data = re.sub(r"\\x0([0-7])(?=[^0-7]|$)", r"\\\1", data)
+    if to_unicode:
+        data = unicode(data, charset)
+    return data
+
+def makeUnicode(text):
+    r"""
+    Convert text to a printable Unicode string. For byte strings (type
+    'str'), use charset ISO-8859-1 for the conversion to Unicode.
+    Control characters are replaced by their escape sequences.
+
+    >>> makeUnicode(u'abc\0d')
+    u'abc\\0d'
+    >>> makeUnicode('a\xe9')
+    u'a\xe9'
+    """
+    if isinstance(text, str):
+        text = unicode(text, "ISO-8859-1")
+    elif not isinstance(text, unicode):
+        text = unicode(text)
+    text = regex_control_code.sub(
+        lambda regs: controlchars[ord(regs.group(1))], text)
+    # Shorten \x00..\x07 escapes to \0..\7 when unambiguous.
+    text = re.sub(r"\\x0([0-7])(?=[^0-7]|$)", r"\\\1", text)
+    return text
+
+def binarySearch(seq, cmp_func):
+    """
+    Search a value in a sequence using binary search. Returns index of the
+    value, or None if the value doesn't exist.
+
+    'seq' has to be sorted in ascending order according to the
+    comparison function ;
+
+    'cmp_func', prototype func(x), is the compare function:
+    - Return strictly positive value if we have to search forward ;
+    - Return strictly negative value if we have to search backward ;
+    - Otherwise (zero) we got the value.
+
+    >>> # Search number 5 (search forward)
+    ... binarySearch([0, 4, 5, 10], lambda x: 5-x)
+    2
+    >>> # Backward search
+    ... binarySearch([10, 5, 4, 0], lambda x: x-5)
+    1
+    """
+    lower = 0
+    upper = len(seq)
+    while lower < upper:
+        index = (lower + upper) >> 1
+        diff = cmp_func(seq[index])
+        if diff < 0:
+            upper = index
+        elif diff > 0:
+            lower = index + 1
+        else:
+            return index
+    return None
+
+def lowerBound(seq, cmp_func):
+    """
+    Binary search returning the index of the first item of seq for which
+    cmp_func(item) is false, assuming seq is partitioned so that all items
+    satisfying cmp_func come first. Returns len(seq) if every item
+    satisfies cmp_func.
+    """
+    f = 0
+    l = len(seq)
+    while l > 0:
+        h = l >> 1
+        m = f + h
+        if cmp_func(seq[m]):
+            f = m
+            f += 1
+            l -= h + 1
+        else:
+            l = h
+    return f
+
+def humanUnixAttributes(mode):
+    """
+    Convert a Unix file attributes (or "file mode") to a unicode string,
+    e.g. the 'ls -l' style u'-rw-r--r-- (644)'.
+
+    Original source code:
+    http://cvs.savannah.gnu.org/viewcvs/coreutils/lib/filemode.c?root=coreutils
+
+    >>> humanUnixAttributes(0644)
+    u'-rw-r--r-- (644)'
+    >>> humanUnixAttributes(02755)
+    u'-rwxr-sr-x (2755)'
+    """
+
+    def ftypelet(mode):
+        # File-type letter: '-' regular, 'b' block, 'c' char, 'd' dir,
+        # 'p' FIFO, 'l' symlink, 's' socket, '?' unknown.
+        if stat.S_ISREG (mode) or not stat.S_IFMT(mode):
+            return '-'
+        if stat.S_ISBLK (mode): return 'b'
+        if stat.S_ISCHR (mode): return 'c'
+        if stat.S_ISDIR (mode): return 'd'
+        if stat.S_ISFIFO(mode): return 'p'
+        if stat.S_ISLNK (mode): return 'l'
+        if stat.S_ISSOCK(mode): return 's'
+        return '?'
+
+    chars = [ ftypelet(mode), 'r', 'w', 'x', 'r', 'w', 'x', 'r', 'w', 'x' ]
+    # Clear permission letters whose bit is not set in the mode.
+    for i in xrange(1, 10):
+        if not mode & 1 << 9 - i:
+            chars[i] = '-'
+    # setuid/setgid/sticky bits replace the matching 'x' slot.
+    if mode & stat.S_ISUID:
+        if chars[3] != 'x':
+            chars[3] = 'S'
+        else:
+            chars[3] = 's'
+    if mode & stat.S_ISGID:
+        if chars[6] != 'x':
+            chars[6] = 'S'
+        else:
+            chars[6] = 's'
+    if mode & stat.S_ISVTX:
+        if chars[9] != 'x':
+            chars[9] = 'T'
+        else:
+            chars[9] = 't'
+    return u"%s (%o)" % (''.join(chars), mode)
+
+def createDict(data, index):
+    """
+    Create a new dictionary from a dictionary key=>values:
+    just keep value number 'index' from all values.
+
+    >>> data={10: ("dix", 100, "a"), 20: ("vingt", 200, "b")}
+    >>> createDict(data, 0)
+    {10: 'dix', 20: 'vingt'}
+    >>> createDict(data, 2)
+    {10: 'a', 20: 'b'}
+    """
+    return dict( (key,values[index]) for key, values in data.iteritems() )
+
+# Start of UNIX timestamp (Epoch): 1st January 1970 at 00:00
+UNIX_TIMESTAMP_T0 = datetime(1970, 1, 1)
+
+def timestampUNIX(value):
+    """
+    Convert an UNIX (32-bit) timestamp to datetime object. Timestamp value
+    is the number of seconds since the 1st January 1970 at 00:00. Maximum
+    value is 2147483647: 19 january 2038 at 03:14:07.
+
+    May raise ValueError for invalid value: value has to be in 0..2147483647.
+
+    >>> timestampUNIX(0)
+    datetime.datetime(1970, 1, 1, 0, 0)
+    >>> timestampUNIX(1154175644)
+    datetime.datetime(2006, 7, 29, 12, 20, 44)
+    >>> timestampUNIX(1154175644.37)
+    datetime.datetime(2006, 7, 29, 12, 20, 44, 370000)
+    >>> timestampUNIX(2147483647)
+    datetime.datetime(2038, 1, 19, 3, 14, 7)
+    """
+    if not isinstance(value, (float, int, long)):
+        raise TypeError("timestampUNIX(): an integer or float is required")
+    if not(0 <= value <= 2147483647):
+        raise ValueError("timestampUNIX(): value have to be in 0..2147483647")
+    return UNIX_TIMESTAMP_T0 + timedelta(seconds=value)
+
+# Start of Macintosh timestamp: 1st January 1904 at 00:00
+MAC_TIMESTAMP_T0 = datetime(1904, 1, 1)
+
+def timestampMac32(value):
+    """
+    Convert a Mac (32-bit) timestamp to a datetime object. The value is the
+    number of seconds since the 1st January 1904 (up to year 2040).
+
+    NOTE(review): unlike timestampUNIX(), an out-of-range value does not
+    raise — a unicode error message is returned instead.
+
+    >>> timestampMac32(0)
+    datetime.datetime(1904, 1, 1, 0, 0)
+    >>> timestampMac32(2843043290)
+    datetime.datetime(1994, 2, 2, 14, 14, 50)
+    """
+    if not isinstance(value, (float, int, long)):
+        raise TypeError("an integer or float is required")
+    if not(0 <= value <= 4294967295):
+        return _("invalid Mac timestamp (%s)") % value
+    return MAC_TIMESTAMP_T0 + timedelta(seconds=value)
+
+def durationWin64(value):
+    """
+    Convert a Windows 64-bit duration to a datetime.timedelta. The value
+    is a 64-bit number of 100 ns units. See also timestampWin64().
+
+    >>> str(durationWin64(1072580000))
+    '0:01:47.258000'
+    >>> str(durationWin64(2146280000))
+    '0:03:34.628000'
+    """
+    if not isinstance(value, (float, int, long)):
+        raise TypeError("an integer or float is required")
+    if value < 0:
+        raise ValueError("value have to be a positive or nul integer")
+    # 100 ns units -> microseconds
+    return timedelta(microseconds=value/10)
+
+# Start of 64-bit Windows timestamp: 1st January 1601 at 00:00
+WIN64_TIMESTAMP_T0 = datetime(1601, 1, 1, 0, 0, 0)
+
+def timestampWin64(value):
+    """
+    Convert a Windows 64-bit timestamp to a datetime object. The timestamp
+    format is a 64-bit number which represents the number of 100 ns units
+    since the 1st January 1601 at 00:00.
+    See also durationWin64(). Maximum date is 28 may 60056.
+
+    >>> timestampWin64(0)
+    datetime.datetime(1601, 1, 1, 0, 0)
+    >>> timestampWin64(127840491566710000)
+    datetime.datetime(2006, 2, 10, 12, 45, 56, 671000)
+    """
+    try:
+        return WIN64_TIMESTAMP_T0 + durationWin64(value)
+    except OverflowError:
+        raise ValueError(_("date newer than year %s (value=%s)") % (MAXYEAR, value))
+
+# Start of 60-bit UUID timestamp: 15 October 1582 at 00:00
+UUID60_TIMESTAMP_T0 = datetime(1582, 10, 15, 0, 0, 0)
+
+def timestampUUID60(value):
+    """
+    Convert a UUID 60-bit timestamp to a datetime object. The timestamp
+    format is a 60-bit number which represents the number of 100 ns units
+    since the 15 October 1582 at 00:00.
+
+    >>> timestampUUID60(0)
+    datetime.datetime(1582, 10, 15, 0, 0)
+    >>> timestampUUID60(130435676263032368)
+    datetime.datetime(1996, 2, 14, 5, 13, 46, 303236)
+    """
+    if not isinstance(value, (float, int, long)):
+        raise TypeError("an integer or float is required")
+    if value < 0:
+        raise ValueError("value have to be a positive or nul integer")
+    try:
+        # 100 ns units -> microseconds
+        return UUID60_TIMESTAMP_T0 + timedelta(microseconds=value/10)
+    except OverflowError:
+        raise ValueError(_("timestampUUID60() overflow (value=%s)") % value)
+
+def humanDatetime(value, strip_microsecond=True):
+    """
+    Convert a timestamp to Unicode string: use ISO format with space separator.
+    By default, the microsecond part is dropped.
+
+    >>> humanDatetime( datetime(2006, 7, 29, 12, 20, 44) )
+    u'2006-07-29 12:20:44'
+    >>> humanDatetime( datetime(2003, 6, 30, 16, 0, 5, 370000) )
+    u'2003-06-30 16:00:05'
+    >>> humanDatetime( datetime(2003, 6, 30, 16, 0, 5, 370000), False )
+    u'2003-06-30 16:00:05.370000'
+    """
+    text = unicode(value.isoformat())
+    text = text.replace('T', ' ')
+    if strip_microsecond and "." in text:
+        text = text.split(".")[0]
+    return text
+
+# Matches runs of one or more Unix newlines (used to collapse duplicates).
+NEWLINES_REGEX = re.compile("\n+")
+
+def normalizeNewline(text):
+    r"""
+    Replace Windows and Mac newlines with Unix newlines.
+    Replace multiple consecutive newlines with one newline.
+
+    >>> normalizeNewline('a\r\nb')
+    'a\nb'
+    >>> normalizeNewline('a\r\rb')
+    'a\nb'
+    >>> normalizeNewline('a\n\nb')
+    'a\nb'
+    """
+    text = text.replace("\r\n", "\n")
+    text = text.replace("\r", "\n")
+    return NEWLINES_REGEX.sub("\n", text)
+
diff --git a/lib/hachoir_core/version.py b/lib/hachoir_core/version.py
new file mode 100644
index 0000000000000000000000000000000000000000..c5e95447d0b16bc24ea2266a0a4e2710f1c6e94a
--- /dev/null
+++ b/lib/hachoir_core/version.py
@@ -0,0 +1,5 @@
+PACKAGE = "hachoir-core"
+VERSION = "1.3.3"
+WEBSITE = 'http://bitbucket.org/haypo/hachoir/wiki/hachoir-core'
+LICENSE = 'GNU GPL v2'
+
diff --git a/lib/hachoir_metadata/__init__.py b/lib/hachoir_metadata/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..5ab4743cf6b63a790d1b3b3ee3ce2542292f5926
--- /dev/null
+++ b/lib/hachoir_metadata/__init__.py
@@ -0,0 +1,15 @@
+from hachoir_metadata.version import VERSION as __version__
+from hachoir_metadata.metadata import extractMetadata
+
+# Just import the module,
+# each module use registerExtractor() method
+import hachoir_metadata.archive
+import hachoir_metadata.audio
+import hachoir_metadata.file_system
+import hachoir_metadata.image
+import hachoir_metadata.jpeg
+import hachoir_metadata.misc
+import hachoir_metadata.program
+import hachoir_metadata.riff
+import hachoir_metadata.video
+
diff --git a/lib/hachoir_metadata/archive.py b/lib/hachoir_metadata/archive.py
new file mode 100644
index 0000000000000000000000000000000000000000..7fa39ea71d75e9cff4d35ad3773142465dfd9f07
--- /dev/null
+++ b/lib/hachoir_metadata/archive.py
@@ -0,0 +1,166 @@
+from hachoir_metadata.metadata_item import QUALITY_BEST, QUALITY_FASTEST
+from hachoir_metadata.safe import fault_tolerant, getValue
+from hachoir_metadata.metadata import (
+    RootMetadata, Metadata, MultipleMetadata, registerExtractor)
+from hachoir_parser.archive import (Bzip2Parser, CabFile, GzipParser,
+    TarFile, ZipFile, MarFile)
+from hachoir_core.tools import humanUnixAttributes
+from hachoir_core.i18n import _
+
def maxNbFile(meta):
    """
    Return how many archive members should be processed for the given
    metadata quality: 0 at fastest quality, unlimited (None) at best
    quality, otherwise a limit scaled with the quality value.
    """
    quality = meta.quality
    if quality <= QUALITY_FASTEST:
        # Fastest mode: skip per-file metadata entirely.
        return 0
    if quality >= QUALITY_BEST:
        # Best mode: no limit at all.
        return None
    return int(10 * quality) + 1
+
def computeCompressionRate(meta):
    """
    Store the compression ratio (uncompressed size / compressed size)
    in meta.compr_rate. Sizes have to be in byte. Does nothing when
    either size is missing or zero.
    """
    if not meta.has("file_size"):
        return
    compr_size = meta.get("compr_size", 0)
    if not compr_size:
        return
    file_size = meta.get("file_size")
    if not file_size:
        return
    meta.compr_rate = float(file_size) / compr_size
+
class Bzip2Metadata(RootMetadata):
    """Extract metadata from a bzip2 archive."""

    def extract(self, zip):
        # Only the compressed payload size is available in a bzip2
        # stream; field sizes are in bits, attribute is in bytes.
        if "file" not in zip:
            return
        self.compr_size = zip["file"].size/8
+
class GzipMetadata(RootMetadata):
    """Extract metadata from a gzip archive header."""

    def extract(self, gzip):
        self.useHeader(gzip)
        # Derive compr_rate from file_size/compr_size set by useHeader().
        computeCompressionRate(self)

    @fault_tolerant
    def useHeader(self, gzip):
        """Read compression method, timestamp, OS, optional filename and
        comment, plus compressed/uncompressed sizes from the header."""
        self.compression = gzip["compression"].display
        if gzip["mtime"]:
            self.last_modification = gzip["mtime"].value
        self.os = gzip["os"].display
        if gzip["has_filename"].value:
            self.filename = getValue(gzip, "filename")
        if gzip["has_comment"].value:
            self.comment = getValue(gzip, "comment")
        # Field size is in bits, attribute in bytes.
        self.compr_size = gzip["file"].size/8
        self.file_size = gzip["size"].value
+
class ZipMetadata(MultipleMetadata):
    """Extract one metadata group per member of a ZIP archive."""

    def extract(self, zip):
        # Limit the number of processed members according to quality.
        max_nb = maxNbFile(self)
        for index, field in enumerate(zip.array("file")):
            if max_nb is not None and max_nb <= index:
                self.warning("ZIP archive contains many files, but only first %s files are processed" % max_nb)
                break
            self.processFile(field)

    @fault_tolerant
    def processFile(self, field):
        """Create a metadata group for a single archive member."""
        meta = Metadata(self)
        meta.filename = field["filename"].value
        meta.creation_date = field["last_mod"].value
        meta.compression = field["compression"].display
        # Sizes come from the data descriptor when present, otherwise
        # from the local file header itself.
        if "data_desc" in field:
            meta.file_size = field["data_desc/file_uncompressed_size"].value
            if field["data_desc/file_compressed_size"].value:
                meta.compr_size = field["data_desc/file_compressed_size"].value
        else:
            meta.file_size = field["uncompressed_size"].value
            if field["compressed_size"].value:
                meta.compr_size = field["compressed_size"].value
        computeCompressionRate(meta)
        self.addGroup(field.name, meta, "File \"%s\"" % meta.get('filename'))
+
class TarMetadata(MultipleMetadata):
    """Extract one metadata group per member of a TAR archive."""

    def extract(self, tar):
        # Limit the number of processed members according to quality.
        max_nb = maxNbFile(self)
        for index, field in enumerate(tar.array("file")):
            if max_nb is not None and max_nb <= index:
                self.warning("TAR archive contains many files, but only first %s files are processed" % max_nb)
                break
            meta = Metadata(self)
            self.extractFile(field, meta)
            if meta.has("filename"):
                title = _('File "%s"') % meta.getText('filename')
            else:
                title = _("File")
            self.addGroup(field.name, meta, title)

    @fault_tolerant
    def extractFile(self, field, meta):
        """Fill meta from one TAR header (numeric fields are octal text)."""
        meta.filename = field["name"].value
        meta.file_attr = humanUnixAttributes(field.getOctal("mode"))
        meta.file_size = field.getOctal("size")
        # getDatetime() may fail on corrupted timestamps; ignore them.
        try:
            if field.getOctal("mtime"):
                meta.last_modification = field.getDatetime()
        except ValueError:
            pass
        meta.file_type = field["type"].display
        meta.author = "%s (uid=%s), group %s (gid=%s)" %\
            (field["uname"].value, field.getOctal("uid"),
             field["gname"].value, field.getOctal("gid"))
+
+
class CabMetadata(MultipleMetadata):
    """Extract metadata from a Microsoft Cabinet (CAB) archive."""

    def extract(self, cab):
        # Compression settings are stored per folder; use the first one.
        if "folder[0]" in cab:
            self.useFolder(cab["folder[0]"])
        self.format_version = "Microsoft Cabinet version %s" % cab["cab_version"].display
        self.comment = "%s folders, %s files" % (
            cab["nb_folder"].value, cab["nb_files"].value)
        # Limit the number of processed members according to quality.
        max_nb = maxNbFile(self)
        for index, field in enumerate(cab.array("file")):
            if max_nb is not None and max_nb <= index:
                self.warning("CAB archive contains many files, but only first %s files are processed" % max_nb)
                break
            self.useFile(field)

    @fault_tolerant
    def useFolder(self, folder):
        """Record the folder compression method (and level if any)."""
        compr = folder["compr_method"].display
        if folder["compr_method"].value != 0:
            compr += " (level %u)" % folder["compr_level"].value
        self.compression = compr

    @fault_tolerant
    def useFile(self, field):
        """Create a metadata group for a single archive member."""
        meta = Metadata(self)
        meta.filename = field["filename"].value
        meta.file_size = field["filesize"].value
        meta.creation_date = field["timestamp"].value
        attr = field["attributes"].value
        # "(none)" is the parser's placeholder for no attribute flags.
        if attr != "(none)":
            meta.file_attr = attr
        if meta.has("filename"):
            title = _("File \"%s\"") % meta.getText('filename')
        else:
            title = _("File")
        self.addGroup(field.name, meta, title)
+
class MarMetadata(MultipleMetadata):
    """Extract metadata from a Microsoft Archive (MAR) file."""

    def extract(self, mar):
        self.comment = "Contains %s files" % mar["nb_file"].value
        self.format_version = "Microsoft Archive version %s" % mar["version"].value
        # Limit the number of processed members according to quality.
        max_nb = maxNbFile(self)
        for index, field in enumerate(mar.array("file")):
            if max_nb is not None and max_nb <= index:
                self.warning("MAR archive contains many files, but only first %s files are processed" % max_nb)
                break
            meta = Metadata(self)
            meta.filename = field["filename"].value
            # MAR members are stored without compression.
            meta.compression = "None"
            meta.file_size = field["filesize"].value
            self.addGroup(field.name, meta, "File \"%s\"" % meta.getText('filename'))
+
+registerExtractor(CabFile, CabMetadata)
+registerExtractor(GzipParser, GzipMetadata)
+registerExtractor(Bzip2Parser, Bzip2Metadata)
+registerExtractor(TarFile, TarMetadata)
+registerExtractor(ZipFile, ZipMetadata)
+registerExtractor(MarFile, MarMetadata)
+
diff --git a/lib/hachoir_metadata/audio.py b/lib/hachoir_metadata/audio.py
new file mode 100644
index 0000000000000000000000000000000000000000..566613e0ccfe06249687b2eac8c76ed0dd67953b
--- /dev/null
+++ b/lib/hachoir_metadata/audio.py
@@ -0,0 +1,406 @@
+from hachoir_metadata.metadata import (registerExtractor,
+    Metadata, RootMetadata, MultipleMetadata)
+from hachoir_parser.audio import AuFile, MpegAudioFile, RealAudioFile, AiffFile, FlacParser
+from hachoir_parser.container import OggFile, RealMediaFile
+from hachoir_core.i18n import _
+from hachoir_core.tools import makePrintable, timedelta2seconds, humanBitRate
+from datetime import timedelta
+from hachoir_metadata.metadata_item import QUALITY_FAST, QUALITY_NORMAL, QUALITY_BEST
+from hachoir_metadata.safe import fault_tolerant, getValue
+
def computeComprRate(meta, size):
    """
    Set meta.compr_rate to (uncompressed PCM size / compressed size).

    The uncompressed size is reconstructed from duration, sample rate,
    sample width and channel count, which gives a value in bits; *size*
    is expected in the same unit (callers pass field sizes, which are
    in bits). Does nothing when any required value is missing or zero.
    """
    if not meta.has("duration") \
    or not meta.has("sample_rate") \
    or not meta.has("bits_per_sample") \
    or not meta.has("nb_channel") \
    or not size:
        return
    orig_size = timedelta2seconds(meta.get("duration")) * meta.get('sample_rate') * meta.get('bits_per_sample') * meta.get('nb_channel')
    meta.compr_rate = float(orig_size) / size
+
def computeBitRate(meta):
    """
    Set meta.bit_rate for uncompressed audio as
    bits_per_sample * nb_channel * sample_rate. Does nothing when any
    of the three values is missing.
    """
    required = ("bits_per_sample", "nb_channel", "sample_rate")
    if not all(meta.has(key) for key in required):
        return
    bit_rate = 1
    for key in required:
        bit_rate *= meta.get(key)
    meta.bit_rate = bit_rate
+
# Map Vorbis comment field names (upper-cased) to Metadata attribute
# names. Several distinct keys intentionally feed the same attribute
# (e.g. DESCRIPTION/COMMENT/"" all map to "comment").
VORBIS_KEY_TO_ATTR = {
    "ARTIST": "artist",
    "ALBUM": "album",
    "TRACKNUMBER": "track_number",
    "TRACKTOTAL": "track_total",
    "ENCODER": "producer",
    "TITLE": "title",
    "LOCATION": "location",
    "DATE": "creation_date",
    "ORGANIZATION": "organization",
    "GENRE": "music_genre",
    "": "comment",
    "COMPOSER": "music_composer",
    "DESCRIPTION": "comment",
    "COMMENT": "comment",
    "WWW": "url",
    "WOAF": "url",
    "LICENSE": "copyright",
}
+
@fault_tolerant
def readVorbisComment(metadata, comment):
    """
    Fill *metadata* from a Vorbis comment block: the vendor string
    becomes the producer, and each "KEY=value" entry is mapped through
    VORBIS_KEY_TO_ATTR. Unknown non-empty entries produce a warning.
    """
    metadata.producer = getValue(comment, "vendor")
    for item in comment.array("metadata"):
        if "=" in item.value:
            key, value = item.value.split("=", 1)
            # Comment keys are case-insensitive by convention.
            key = key.upper()
            if key in VORBIS_KEY_TO_ATTR:
                key = VORBIS_KEY_TO_ATTR[key]
                setattr(metadata, key, value)
            elif value:
                metadata.warning("Skip Vorbis comment %s: %s" % (key, value))
+
class OggMetadata(MultipleMetadata):
    """
    Extract metadata from an Ogg container: one group per Vorbis audio
    or Theora/other video stream found in the first pages, plus an
    overall duration computed from the last page's granule position.
    """

    def extract(self, ogg):
        # granule_quotient converts the last page's granule position
        # into seconds: sample rate for audio, frame rate for video.
        granule_quotient = None
        for index, page in enumerate(ogg.array("page")):
            if "segments" not in page:
                continue
            page = page["segments"]
            if "vorbis_hdr" in page:
                meta = Metadata(self)
                self.vorbisHeader(page["vorbis_hdr"], meta)
                self.addGroup("audio[]", meta, "Audio")
                if not granule_quotient and meta.has("sample_rate"):
                    granule_quotient = meta.get('sample_rate')
            if "theora_hdr" in page:
                meta = Metadata(self)
                self.theoraHeader(page["theora_hdr"], meta)
                self.addGroup("video[]", meta, "Video")
            if "video_hdr" in page:
                meta = Metadata(self)
                self.videoHeader(page["video_hdr"], meta)
                self.addGroup("video[]", meta, "Video")
                if not granule_quotient and meta.has("frame_rate"):
                    granule_quotient = meta.get('frame_rate')
            if "comment" in page:
                readVorbisComment(self, page["comment"])
            if 3 <= index:
                # Only process pages 0..3
                break

        # Compute duration
        if granule_quotient and QUALITY_NORMAL <= self.quality:
            page = ogg.createLastPage()
            if page and "abs_granule_pos" in page:
                # Huge/bogus granule positions can overflow timedelta.
                try:
                    self.duration = timedelta(seconds=float(page["abs_granule_pos"].value) / granule_quotient)
                except OverflowError:
                    pass

    def videoHeader(self, header, meta):
        """Fill meta from a generic Ogg video stream header."""
        meta.compression = header["fourcc"].display
        meta.width = header["width"].value
        meta.height = header["height"].value
        meta.bits_per_pixel = header["bits_per_sample"].value
        # time_unit is in 100 ns units (10,000,000 per second).
        if header["time_unit"].value:
            meta.frame_rate = 10000000.0 / header["time_unit"].value

    def theoraHeader(self, header, meta):
        """Fill meta from a Theora identification header."""
        meta.compression = "Theora"
        meta.format_version = "Theora version %u.%u (revision %u)" % (\
            header["version_major"].value,
            header["version_minor"].value,
            header["version_revision"].value)
        meta.width = header["frame_width"].value
        meta.height = header["frame_height"].value
        if header["fps_den"].value:
            meta.frame_rate = float(header["fps_num"].value) / header["fps_den"].value
        if header["aspect_ratio_den"].value:
            meta.aspect_ratio = float(header["aspect_ratio_num"].value) / header["aspect_ratio_den"].value
        meta.pixel_format = header["pixel_format"].display
        meta.comment = "Quality: %s" % header["quality"].value

    def vorbisHeader(self, header, meta):
        """Fill meta from a Vorbis identification header."""
        meta.compression = u"Vorbis"
        meta.sample_rate = header["audio_sample_rate"].value
        meta.nb_channel = header["audio_channels"].value
        meta.format_version = u"Vorbis version %s" % header["vorbis_version"].value
        meta.bit_rate = header["bitrate_nominal"].value
+
class AuMetadata(RootMetadata):
    """Extract metadata from a Sun/NeXT audio (.au) file."""

    def extract(self, audio):
        self.sample_rate = audio["sample_rate"].value
        self.nb_channel = audio["channels"].value
        self.compression = audio["codec"].display
        if "info" in audio:
            self.comment = audio["info"].value
        self.bits_per_sample = audio.getBitsPerSample()
        computeBitRate(self)
        if "audio_data" in audio:
            # audio_data size is in bits, so size/bit_rate is seconds.
            if self.has("bit_rate"):
                self.duration = timedelta(seconds=float(audio["audio_data"].size) / self.get('bit_rate'))
            computeComprRate(self, audio["audio_data"].size)
+
class RealAudioMetadata(RootMetadata):
    """Extract metadata from a RealAudio (.ra) file."""

    # Known codec FourCCs with a fixed audio bit rate (bit/s).
    FOURCC_TO_BITRATE = {
        u"28_8": 15200, # 28.8 kbit/sec (audio bit rate: 15.2 kbit/s)
        u"14_4": 8000,  # 14.4 kbit/sec
        u"lpcJ": 8000,  # 14.4 kbit/sec
    }

    def extract(self, real):
        version = real["version"].value
        if "metadata" in real:
            self.useMetadata(real["metadata"])
        self.useRoot(real)
        self.format_version = "Real audio version %s" % version
        # Estimate the audio payload size depending on format version.
        if version == 3:
            size = getValue(real, "data_size")
        elif "filesize" in real and "headersize" in real:
            # Payload = total size minus header, with fixed offsets.
            size = (real["filesize"].value + 40) - (real["headersize"].value + 16)
        else:
            size = None
        if size:
            # Convert bytes to bits for duration/compression computation.
            size *= 8
            if self.has("bit_rate"):
                sec = float(size) / self.get('bit_rate')
                self.duration = timedelta(seconds=sec)
            computeComprRate(self, size)

    @fault_tolerant
    def useMetadata(self, info):
        """Copy the title/author/copyright/comment text block."""
        self.title = info["title"].value
        self.author = info["author"].value
        self.copyright = info["copyright"].value
        self.comment = info["comment"].value

    @fault_tolerant
    def useRoot(self, real):
        """Read sample format; version 3 files are fixed 8 kHz mono."""
        self.bits_per_sample = 16   # FIXME: Is that correct?
        if real["version"].value != 3:
            self.sample_rate = real["sample_rate"].value
            self.nb_channel = real["channels"].value
        else:
            self.sample_rate = 8000
            self.nb_channel = 1
        fourcc = getValue(real, "FourCC")
        if fourcc:
            self.compression = fourcc
            try:
                self.bit_rate = self.FOURCC_TO_BITRATE[fourcc]
            except LookupError:
                pass
+
class RealMediaMetadata(MultipleMetadata):
    """Extract metadata from a RealMedia (.rm) container."""

    # Map "logical-fileinfo" property names to Metadata attributes.
    KEY_TO_ATTR = {
        "generated by": "producer",
        "creation date": "creation_date",
        "modification date": "last_modification",
        "description": "comment",
    }

    def extract(self, media):
        if "file_prop" in media:
            self.useFileProp(media["file_prop"])
        if "content_desc" in media:
            self.useContentDesc(media["content_desc"])
        # One metadata group per stream property chunk.
        for index, stream in enumerate(media.array("stream_prop")):
            self.useStreamProp(stream, index)

    @fault_tolerant
    def useFileInfoProp(self, prop):
        """Map one file-info property onto a root attribute."""
        key = prop["name"].value.lower()
        value = prop["value"].value
        if key in self.KEY_TO_ATTR:
            setattr(self, self.KEY_TO_ATTR[key], value)
        elif value:
            self.warning("Skip %s: %s" % (prop["name"].value, value))

    @fault_tolerant
    def useFileProp(self, prop):
        """Read the global average bit rate and duration."""
        self.bit_rate = prop["avg_bit_rate"].value
        self.duration = timedelta(milliseconds=prop["duration"].value)

    @fault_tolerant
    def useContentDesc(self, content):
        """Copy the title/author/copyright/comment text block."""
        self.title = content["title"].value
        self.author = content["author"].value
        self.copyright = content["copyright"].value
        self.comment = content["comment"].value

    @fault_tolerant
    def useStreamProp(self, stream, index):
        """Create a metadata group for one stream."""
        meta = Metadata(self)
        meta.comment = "Start: %s" % stream["stream_start"].value
        if getValue(stream, "mime_type") == "logical-fileinfo":
            # Pseudo-stream carrying file-level key/value properties.
            for prop in stream.array("file_info/prop"):
                self.useFileInfoProp(prop)
        else:
            meta.bit_rate = stream["avg_bit_rate"].value
            meta.duration = timedelta(milliseconds=stream["duration"].value)
            meta.mime_type = getValue(stream, "mime_type")
        meta.title = getValue(stream, "desc")
        # NOTE(review): a group is added even for logical-fileinfo
        # streams (then holding only comment/title) — confirm intended.
        self.addGroup("stream[%u]" % index, meta, "Stream #%u" % (1+index))
+
class MpegAudioMetadata(RootMetadata):
    """
    Extract metadata from an MPEG audio (MP3) file: frame header
    information, ID3v1 and ID3v2 tags, bit rate and duration.
    """

    # Map ID3 frame identifiers to Metadata attribute names.
    TAG_TO_KEY = {
        # ID3 version 2.2
        "TP1": "author",
        "COM": "comment",
        "TEN": "producer",
        "TRK": "track_number",
        "TAL": "album",
        "TT2": "title",
        "TYE": "creation_date",
        "TCO": "music_genre",

        # ID3 version 2.3+
        "TPE1": "author",
        "COMM": "comment",
        "TENC": "producer",
        "TRCK": "track_number",
        "TALB": "album",
        "TIT2": "title",
        "TYER": "creation_date",
        "WXXX": "url",
        "TCON": "music_genre",
        "TLAN": "language",
        "TCOP": "copyright",
        "TDAT": "creation_date",
        "TRDA": "creation_date",
        "TORY": "creation_date",
        "TIT1": "title",
    }

    def processID3v2(self, field):
        """Map one ID3v2 frame onto a Metadata attribute."""
        # Read value
        if "content" not in field:
            return
        content = field["content"]
        if "text" not in content:
            return
        # Prefix the text with its sub-title when one is present.
        if "title" in content and content["title"].value:
            value = "%s: %s" % (content["title"].value, content["text"].value)
        else:
            value = content["text"].value

        # Known tag?
        tag = field["tag"].value
        if tag not in self.TAG_TO_KEY:
            if tag:
                if isinstance(tag, str):
                    tag = makePrintable(tag, "ISO-8859-1", to_unicode=True)
                self.warning("Skip ID3v2 tag %s: %s" % (tag, value))
            return
        key = self.TAG_TO_KEY[tag]
        setattr(self, key, value)

    def readID3v2(self, id3):
        """Process every tagged frame of an ID3v2 field set."""
        for field in id3:
            if field.is_field_set and "tag" in field:
                self.processID3v2(field)

    def extract(self, mp3):
        if "/frames/frame[0]" in mp3:
            frame = mp3["/frames/frame[0]"]
            self.nb_channel = (frame.getNbChannel(), frame["channel_mode"].display)
            self.format_version = u"MPEG version %s layer %s" % \
                (frame["version"].display, frame["layer"].display)
            self.sample_rate = frame.getSampleRate()
            self.bits_per_sample = 16
            # CBR streams get an exact duration; VBR streams need
            # averaging over many frames.
            if mp3["frames"].looksConstantBitRate():
                self.computeBitrate(frame)
            else:
                self.computeVariableBitrate(mp3)
        if "id3v1" in mp3:
            id3 = mp3["id3v1"]
            self.comment = id3["comment"].value
            self.author = id3["author"].value
            self.title = id3["song"].value
            self.album = id3["album"].value
            if id3["year"].value != "0":
                self.creation_date = id3["year"].value
            if "track_nb" in id3:
                self.track_number = id3["track_nb"].value
        if "id3v2" in mp3:
            self.readID3v2(mp3["id3v2"])
        if "frames" in mp3:
            computeComprRate(self, mp3["frames"].size)

    def computeBitrate(self, frame):
        """Set bit_rate and duration for a constant-bit-rate stream."""
        bit_rate = frame.getBitRate() # may returns None on error
        if not bit_rate:
            return
        self.bit_rate = (bit_rate, _("%s (constant)") % humanBitRate(bit_rate))
        # Frame container size is in bits, so size/bit_rate is seconds.
        self.duration = timedelta(seconds=float(frame["/frames"].size) / bit_rate)

    def computeVariableBitrate(self, mp3):
        """Estimate bit_rate/duration by averaging over many frames;
        the number of sampled frames scales with the quality setting."""
        if self.quality <= QUALITY_FAST:
            return
        count = 0
        if QUALITY_BEST <= self.quality:
            self.warning("Process all MPEG audio frames to compute exact duration")
            max_count = None
        else:
            max_count = 500 * self.quality
        total_bit_rate = 0.0
        for index, frame in enumerate(mp3.array("frames/frame")):
            # Skip the first frames, which may not be representative.
            if index < 3:
                continue
            bit_rate = frame.getBitRate()
            if bit_rate:
                total_bit_rate += float(bit_rate)
                count += 1
                if max_count and max_count <= count:
                    break
        if not count:
            return
        bit_rate = total_bit_rate / count
        self.bit_rate = (bit_rate,
            _("%s (Variable bit rate)") % humanBitRate(bit_rate))
        duration = timedelta(seconds=float(mp3["frames"].size) / bit_rate)
        self.duration = duration
+
class AiffMetadata(RootMetadata):
    """Extract metadata from an AIFF audio file."""

    def extract(self, aiff):
        if "common" in aiff:
            self.useCommon(aiff["common"])
        computeBitRate(self)

    @fault_tolerant
    def useCommon(self, info):
        """Read sample format and duration from the COMM chunk."""
        self.nb_channel = info["nb_channel"].value
        self.bits_per_sample = info["sample_size"].value
        self.sample_rate = getValue(info, "sample_rate")
        if self.has("sample_rate"):
            rate = self.get("sample_rate")
            if rate:
                sec = float(info["nb_sample"].value) / rate
                self.duration = timedelta(seconds=sec)
        # Codec field only exists in AIFF-C files.
        if "codec" in info:
            self.compression = info["codec"].display
+
class FlacMetadata(RootMetadata):
    """Extract metadata from a FLAC audio file."""

    def extract(self, flac):
        if "metadata/stream_info/content" in flac:
            self.useStreamInfo(flac["metadata/stream_info/content"])
        if "metadata/comment/content" in flac:
            readVorbisComment(self, flac["metadata/comment/content"])

    @fault_tolerant
    def useStreamInfo(self, info):
        """Read sample format and duration from the STREAMINFO block.
        The channel and sample-size fields store the real value minus
        one, hence the +1 corrections."""
        self.nb_channel = info["nb_channel"].value + 1
        self.bits_per_sample = info["bits_per_sample"].value + 1
        self.sample_rate = info["sample_hertz"].value
        sec = info["total_samples"].value
        if sec:
            sec = float(sec) / info["sample_hertz"].value
            self.duration = timedelta(seconds=sec)
+
+registerExtractor(AuFile, AuMetadata)
+registerExtractor(MpegAudioFile, MpegAudioMetadata)
+registerExtractor(OggFile, OggMetadata)
+registerExtractor(RealMediaFile, RealMediaMetadata)
+registerExtractor(RealAudioFile, RealAudioMetadata)
+registerExtractor(AiffFile, AiffMetadata)
+registerExtractor(FlacParser, FlacMetadata)
+
diff --git a/lib/hachoir_metadata/config.py b/lib/hachoir_metadata/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..c45d6a15013d792ad4a14257d12ce87466c4138e
--- /dev/null
+++ b/lib/hachoir_metadata/config.py
@@ -0,0 +1,2 @@
# Upper bound on extracted string values, in characters.
MAX_STR_LENGTH = 300  # characters
# When True, presumably skip human-readable formatting of values
# — TODO confirm against metadata output code.
RAW_OUTPUT = False
diff --git a/lib/hachoir_metadata/file_system.py b/lib/hachoir_metadata/file_system.py
new file mode 100644
index 0000000000000000000000000000000000000000..b111c486202f050cddd96b896dc05752dbc53d70
--- /dev/null
+++ b/lib/hachoir_metadata/file_system.py
@@ -0,0 +1,28 @@
+from hachoir_metadata.metadata import RootMetadata, registerExtractor
+from hachoir_metadata.safe import fault_tolerant
+from hachoir_parser.file_system import ISO9660
+from datetime import datetime
+
class ISO9660_Metadata(RootMetadata):
    """Extract metadata from the primary volume descriptor of an ISO 9660 image."""

    def extract(self, iso):
        desc = iso['volume[0]/content']
        # NOTE(review): title and author are each assigned twice;
        # hachoir Metadata attributes typically accumulate multiple
        # values rather than overwrite — confirm, otherwise the first
        # assignment of each pair is dead.
        self.title = desc['volume_id'].value
        self.title = desc['vol_set_id'].value
        self.author = desc['publisher'].value
        self.author = desc['data_preparer'].value
        self.producer = desc['application'].value
        self.copyright = desc['copyright'].value
        self.readTimestamp('creation_date', desc['creation_ts'].value)
        self.readTimestamp('last_modification', desc['modification_ts'].value)

    @fault_tolerant
    def readTimestamp(self, key, value):
        """Parse an ISO 9660 "YYYYMMDDHHMMSS..." text timestamp into a
        datetime attribute; a year of 0000 means "not set"."""
        if value.startswith("0000"):
            return
        value = datetime(
            int(value[0:4]), int(value[4:6]), int(value[6:8]),
            int(value[8:10]), int(value[10:12]), int(value[12:14]))
        setattr(self, key, value)

registerExtractor(ISO9660, ISO9660_Metadata)
+
diff --git a/lib/hachoir_metadata/filter.py b/lib/hachoir_metadata/filter.py
new file mode 100644
index 0000000000000000000000000000000000000000..b4af8e3c6dc396ba95cb18b5b0b9e7049d3a4e04
--- /dev/null
+++ b/lib/hachoir_metadata/filter.py
@@ -0,0 +1,52 @@
+from hachoir_metadata.timezone import UTC
+from datetime import date, datetime
+
+# Year in 1850..2030
+MIN_YEAR = 1850
+MAX_YEAR = 2030
+
class Filter:
    """
    Predicate accepting values of *valid_types* that fall inside an
    optional [min, max] range. Values of any other type always pass.
    """

    def __init__(self, valid_types, min=None, max=None):
        self.types = valid_types
        self.min = min
        self.max = max

    def __call__(self, value):
        if not isinstance(value, self.types):
            # Not a type we check: accept it unconditionally.
            return True
        too_small = self.min is not None and value < self.min
        too_big = self.max is not None and value > self.max
        return not (too_small or too_big)
+
class NumberFilter(Filter):
    """Range filter restricted to numbers (int, long, float)."""
    def __init__(self, min=None, max=None):
        Filter.__init__(self, (int, long, float), min, max)
+
class DatetimeFilter(Filter):
    """
    Reject dates/datetimes outside the MIN_YEAR..MAX_YEAR window,
    using comparable bounds for each flavour of value (plain date,
    naive datetime, timezone-aware datetime).
    """
    def __init__(self, min=None, max=None):
        Filter.__init__(self, (date, datetime),
            datetime(MIN_YEAR, 1, 1),
            datetime(MAX_YEAR, 12, 31))
        # Separate bounds: date and naive/aware datetime objects
        # cannot be compared with each other.
        self.min_date = date(MIN_YEAR, 1, 1)
        self.max_date = date(MAX_YEAR, 12, 31)
        self.min_tz = datetime(MIN_YEAR, 1, 1, tzinfo=UTC)
        self.max_tz = datetime(MAX_YEAR, 12, 31, tzinfo=UTC)

    def __call__(self, value):
        """
        Use different min/max values depending on value type
        (datetime with timezone, datetime or date).
        """
        if not isinstance(value, self.types):
            return True
        if hasattr(value, "tzinfo") and value.tzinfo:
            return (self.min_tz <= value <= self.max_tz)
        elif isinstance(value, datetime):
            return (self.min <= value <= self.max)
        else:
            return (self.min_date <= value <= self.max_date)
+
+DATETIME_FILTER = DatetimeFilter()
+
diff --git a/lib/hachoir_metadata/formatter.py b/lib/hachoir_metadata/formatter.py
new file mode 100644
index 0000000000000000000000000000000000000000..0d04f92000337a3fb860a27a370c32a03b278ae8
--- /dev/null
+++ b/lib/hachoir_metadata/formatter.py
@@ -0,0 +1,25 @@
+from hachoir_core.i18n import _, ngettext
+
# Human-readable names for common channel counts.
NB_CHANNEL_NAME = {1: _("mono"), 2: _("stereo")}

def humanAudioChannel(value):
    """Return "mono"/"stereo" for 1/2 channels, else the number as text."""
    return NB_CHANNEL_NAME.get(value, unicode(value))
+
def humanFrameRate(value):
    """Format a numeric frame rate as "N.N fps"; pass other values through."""
    if isinstance(value, (int, long, float)):
        return _("%.1f fps") % value
    else:
        return value
+
def humanComprRate(rate):
    """Format a compression ratio with one decimal, e.g. 1.5 -> u"1.5x"."""
    return u"{0:.1f}x".format(rate)
+
def humanAltitude(value):
    """Format an altitude in meters, with singular/plural handling."""
    return ngettext("%.1f meter", "%.1f meters", value) % value

def humanPixelSize(value):
    """Format a pixel count, with singular/plural handling."""
    return ngettext("%s pixel", "%s pixels", value) % value
+
def humanDPI(value):
    """Append the DPI unit to a resolution value."""
    return u"{0} DPI".format(value)
+
diff --git a/lib/hachoir_metadata/image.py b/lib/hachoir_metadata/image.py
new file mode 100644
index 0000000000000000000000000000000000000000..905cdd7939f728fee4e7bf32c3220e5c6e289420
--- /dev/null
+++ b/lib/hachoir_metadata/image.py
@@ -0,0 +1,299 @@
+from hachoir_metadata.metadata import (registerExtractor,
+    Metadata, RootMetadata, MultipleMetadata)
+from hachoir_parser.image import (
+    BmpFile, IcoFile, PcxFile, GifFile, PngFile, TiffFile,
+    XcfFile, TargaFile, WMF_File, PsdFile)
+from hachoir_parser.image.png import getBitsPerPixel as pngBitsPerPixel
+from hachoir_parser.image.xcf import XcfProperty
+from hachoir_core.i18n import _
+from hachoir_metadata.safe import fault_tolerant
+
def computeComprRate(meta, compr_size):
    """
    Store the image compression rate on *meta* as "compr_rate".

    The uncompressed reference size is width x height x bits_per_pixel
    (the color palette is deliberately ignored); *compr_size* is the
    compressed size in bits. Does nothing when a dimension is missing
    or when compr_size is zero (avoids a division by zero).
    """
    needed = ("width", "height", "bits_per_pixel")
    if not all(meta.has(key) for key in needed):
        return
    if not compr_size:
        return
    raw_bits = 1
    for key in needed:
        raw_bits *= meta.get(key)
    meta.compr_rate = float(raw_bits) / compr_size
+
class BmpMetadata(RootMetadata):
    """Extract metadata (size, colors, compression, DPI) from a Windows Bitmap picture."""
    def extract(self, image):
        if "header" not in image:
            return
        hdr = image["header"]
        self.width = hdr["width"].value
        self.height = hdr["height"].value
        bpp = hdr["bpp"].value
        if bpp:
            # Palette images (<= 8 bpp) may declare how many palette entries are used
            if bpp <= 8 and "used_colors" in hdr:
                self.nb_colors = hdr["used_colors"].value
            self.bits_per_pixel = bpp
        self.compression = hdr["compression"].display
        self.format_version = u"Microsoft Bitmap version %s" % hdr.getFormatVersion()

        self.width_dpi = hdr["horizontal_dpi"].value
        self.height_dpi = hdr["vertical_dpi"].value

        # Compression rate needs width/height/bpp set above
        if "pixels" in image:
            computeComprRate(self, image["pixels"].size)
+
class TiffMetadata(RootMetadata):
    """Extract image size metadata from a TIFF picture (first IFD only)."""

    # IFD entry name -> metadata attribute name.
    # BUGFIX: "img_height" previously mapped to "width", which recorded the
    # image height as a second width value and left "height" unset.
    key_to_attr = {
        "img_width": "width",
        "img_height": "height",

        # TODO: Enable that (need link to value)
#        "description": "comment",
#        "doc_name": "title",
#        "orientation": "image_orientation",
    }
    def extract(self, tiff):
        if "ifd" in tiff:
            self.useIFD(tiff["ifd"])

    def useIFD(self, ifd):
        """Copy the known entries of an Image File Directory into the metadata."""
        for field in ifd:
            try:
                attrname = self.key_to_attr[field.name]
            except KeyError:
                # Unknown tag: ignore it
                continue
            if "value" not in field:
                continue
            value = field["value"].value
            setattr(self, attrname, value)
+
class IcoMetadata(MultipleMetadata):
    """Extract per-image metadata from a Windows icon (.ico) file."""

    # Palette size (number of colors) -> bits per pixel
    color_to_bpp = {
        2: 1,
        16: 4,
        256: 8
    }

    def extract(self, icon):
        for index, header in enumerate(icon.array("icon_header")):
            image = Metadata(self)

            # Read size and colors from header
            image.width = header["width"].value
            image.height = header["height"].value
            bpp = header["bpp"].value
            nb_colors = header["nb_color"].value
            if nb_colors != 0:
                image.nb_colors = nb_colors
                # Derive the depth from the palette size when the header omits it
                if bpp == 0 and nb_colors in self.color_to_bpp:
                    bpp = self.color_to_bpp[nb_colors]
            elif bpp == 0:
                # No palette and no declared depth: fall back to 8 bits/pixel
                bpp = 8
            image.bits_per_pixel = bpp
            image.setHeader(_("Icon #%u (%sx%s)")
                % (1+index, image.get("width", "?"), image.get("height", "?")))

            # Read compression from data (if available)
            key = "icon_data[%u]/header/codec" % index
            if key in icon:
                image.compression = icon[key].display
            key = "icon_data[%u]/pixels" % index
            if key in icon:
                computeComprRate(image, icon[key].size)

            # Store new image
            self.addGroup("image[%u]" % index, image)
+
class PcxMetadata(RootMetadata):
    """Extract metadata from a ZSoft PCX picture."""
    @fault_tolerant
    def extract(self, pcx):
        # Size is stored as inclusive maximum coordinates, hence the +1
        self.width = 1 + pcx["xmax"].value
        self.height = 1 + pcx["ymax"].value
        self.width_dpi = pcx["horiz_dpi"].value
        self.height_dpi = pcx["vert_dpi"].value
        self.bits_per_pixel = pcx["bpp"].value
        if 1 <= pcx["bpp"].value <= 8:
            # Palette images: 2**bpp colors
            self.nb_colors = 2 ** pcx["bpp"].value
        # PCX image data is always RLE compressed
        self.compression = _("Run-length encoding (RLE)")
        self.format_version = "PCX: %s" % pcx["version"].display
        if "image_data" in pcx:
            computeComprRate(self, pcx["image_data"].size)
+
class XcfMetadata(RootMetadata):
    """Extract metadata (size, compression, comment, DPI) from a GIMP XCF image."""
    # Map image type to bits/pixel
    TYPE_TO_BPP = {0: 24, 1: 8, 2: 8}

    def extract(self, xcf):
        self.width = xcf["width"].value
        self.height = xcf["height"].value
        try:
            self.bits_per_pixel = self.TYPE_TO_BPP[ xcf["type"].value ]
        except KeyError:
            # Unknown image type: leave bits_per_pixel unset
            pass
        self.format_version = xcf["type"].display
        self.readProperties(xcf)

    @fault_tolerant
    def processProperty(self, prop):
        # Only a few XCF property types carry metadata of interest
        type = prop["type"].value
        if type == XcfProperty.PROP_PARASITES:
            for field in prop["data"]:
                if "name" not in field or "data" not in field:
                    continue
                if field["name"].value == "gimp-comment":
                    self.comment = field["data"].value
        elif type == XcfProperty.PROP_COMPRESSION:
            self.compression = prop["data/compression"].display
        elif type == XcfProperty.PROP_RESOLUTION:
            self.width_dpi = int(prop["data/xres"].value)
            self.height_dpi = int(prop["data/yres"].value)

    def readProperties(self, xcf):
        # Walk every property; processProperty() is fault tolerant
        for prop in xcf.array("property"):
            self.processProperty(prop)
+
class PngMetadata(RootMetadata):
    """Extract metadata from a PNG picture (header, text chunks, timestamp, DPI)."""
    # tEXt keyword (lowercased) -> metadata attribute name
    TEXT_TO_ATTR = {
        "software": "producer",
    }

    def extract(self, png):
        if "header" in png:
            self.useHeader(png["header"])
        if "time" in png:
            self.useTime(png["time"])
        if "physical" in png:
            self.usePhysical(png["physical"])
        for comment in png.array("text"):
            if "text" not in comment:
                continue
            keyword = comment["keyword"].value
            text = comment["text"].value
            try:
                key = self.TEXT_TO_ATTR[keyword.lower()]
                setattr(self, key, text)
            except KeyError:
                # Unknown keyword: keep it as a "keyword=text" comment
                if keyword.lower() != "comment":
                    self.comment = "%s=%s" % (keyword, text)
                else:
                    self.comment = text
        # Compressed size is the total size of all data chunks (in bits)
        compr_size = sum( data.size for data in png.array("data") )
        computeComprRate(self, compr_size)

    @fault_tolerant
    def useTime(self, field):
        self.creation_date = field.value

    @fault_tolerant
    def usePhysical(self, field):
        self.width_dpi = field["pixel_per_unit_x"].value
        self.height_dpi = field["pixel_per_unit_y"].value

    @fault_tolerant
    def useHeader(self, header):
        """Read size, pixel format and color count from the PNG header."""
        self.width = header["width"].value
        self.height = header["height"].value

        # Read number of colors and pixel format
        if "/palette/size" in header:
            # Palette entries are RGB triplets (3 bytes each)
            nb_colors = header["/palette/size"].value // 3
        else:
            nb_colors = None
        if not header["has_palette"].value:
            if header["has_alpha"].value:
                self.pixel_format = _("RGBA")
            else:
                self.pixel_format = _("RGB")
        elif "/transparency" in header:
            self.pixel_format = _("Color index with transparency")
            # presumably one palette entry is reserved for transparency -- confirm
            if nb_colors:
                nb_colors -= 1
        else:
            self.pixel_format = _("Color index")
        self.bits_per_pixel = pngBitsPerPixel(header)
        if nb_colors:
            self.nb_colors = nb_colors

        # Read compression, timestamp, etc.
        self.compression = header["compression"].display
+
class GifMetadata(RootMetadata):
    """Extract metadata from a GIF picture (screen size, palette, comments)."""
    def extract(self, gif):
        self.useScreen(gif["/screen"])
        if self.has("bits_per_pixel"):
            # A GIF palette holds 2**bpp entries
            self.nb_colors = (1 << self.get('bits_per_pixel'))
        # GIF image data is always LZW compressed
        self.compression = _("LZW")
        self.format_version =  "GIF version %s" % gif["version"].value
        for comments in gif.array("comments"):
            for comment in gif.array(comments.name + "/comment"):
                self.comment = comment.value
        if "graphic_ctl/has_transp" in gif and gif["graphic_ctl/has_transp"].value:
            self.pixel_format = _("Color index with transparency")
        else:
            self.pixel_format = _("Color index")

    @fault_tolerant
    def useScreen(self, screen):
        self.width = screen["width"].value
        self.height = screen["height"].value
        # The stored value is bits/pixel minus one
        self.bits_per_pixel = (1 + screen["bpp"].value)
+
class TargaMetadata(RootMetadata):
    """Extract metadata from a Truevision Targa (TGA) picture."""
    def extract(self, tga):
        self.width = tga["width"].value
        self.height = tga["height"].value
        self.bits_per_pixel = tga["bpp"].value
        # nb_color is zero for true-color images (no palette)
        if tga["nb_color"].value:
            self.nb_colors = tga["nb_color"].value
        self.compression = tga["codec"].display
        if "pixels" in tga:
            computeComprRate(self, tga["pixels"].size)
+
class WmfMetadata(RootMetadata):
    """Extract metadata from a Windows Metafile (placeable WMF or EMF variants)."""
    def extract(self, wmf):
        if wmf.isAPM():
            # Aldus Placeable Metafile: size comes from the bounding rectangle
            if "amf_header/rect" in wmf:
                rect = wmf["amf_header/rect"]
                self.width = (rect["right"].value - rect["left"].value)
                self.height = (rect["bottom"].value - rect["top"].value)
            self.bits_per_pixel = 24
        elif wmf.isEMF():
            emf = wmf["emf_header"]
            if "description" in emf:
                # NOTE(review): description appears to hold "producer\0title";
                # confirm the field layout against the EMF parser
                desc = emf["description"].value
                if "\0" in desc:
                    self.producer, self.title = desc.split("\0", 1)
                else:
                    self.producer = desc
            if emf["nb_colors"].value:
                self.nb_colors = emf["nb_colors"].value
                self.bits_per_pixel = 8
            else:
                self.bits_per_pixel = 24
            self.width = emf["width_px"].value
            self.height = emf["height_px"].value
+
class PsdMetadata(RootMetadata):
    """Extract metadata from an Adobe Photoshop (.psd) document."""
    @fault_tolerant
    def extract(self, psd):
        self.width = psd["width"].value
        self.height = psd["height"].value
        # Total depth = depth per channel x number of channels
        self.bits_per_pixel = psd["depth"].value * psd["nb_channels"].value
        self.pixel_format = psd["color_mode"].display
        self.compression = psd["compression"].display
+
+registerExtractor(IcoFile, IcoMetadata)
+registerExtractor(GifFile, GifMetadata)
+registerExtractor(XcfFile, XcfMetadata)
+registerExtractor(TargaFile, TargaMetadata)
+registerExtractor(PcxFile, PcxMetadata)
+registerExtractor(BmpFile, BmpMetadata)
+registerExtractor(PngFile, PngMetadata)
+registerExtractor(TiffFile, TiffMetadata)
+registerExtractor(WMF_File, WmfMetadata)
+registerExtractor(PsdFile, PsdMetadata)
+
diff --git a/lib/hachoir_metadata/jpeg.py b/lib/hachoir_metadata/jpeg.py
new file mode 100644
index 0000000000000000000000000000000000000000..29247dc6fe7696b536d31a5181e7478ff6b7463b
--- /dev/null
+++ b/lib/hachoir_metadata/jpeg.py
@@ -0,0 +1,289 @@
+from hachoir_metadata.metadata import RootMetadata, registerExtractor
+from hachoir_metadata.image import computeComprRate
+from hachoir_parser.image.exif import ExifEntry
+from hachoir_parser.image.jpeg import (
+    JpegFile, JpegChunk,
+    QUALITY_HASH_COLOR, QUALITY_SUM_COLOR,
+    QUALITY_HASH_GRAY, QUALITY_SUM_GRAY)
+from hachoir_core.field import MissingField
+from hachoir_core.i18n import _
+from hachoir_core.tools import makeUnicode
+from hachoir_metadata.safe import fault_tolerant
+from datetime import datetime
+
def deg2float(degree, minute, second):
    """Convert a GPS angle given as (degrees, minutes, seconds) to decimal degrees."""
    minutes = float(minute) + float(second) / 60.0
    return degree + minutes / 60.0
+
class JpegMetadata(RootMetadata):
    """
    Extract metadata from a JPEG picture: image size and compression,
    EXIF tags (camera settings, GPS position, timestamps), IPTC and
    Photoshop records, plus an estimation of the JPEG quality.
    """

    # EXIF tag identifier -> metadata attribute name
    EXIF_KEY = {
        # Exif metadatas
        ExifEntry.TAG_CAMERA_MANUFACTURER: "camera_manufacturer",
        ExifEntry.TAG_CAMERA_MODEL: "camera_model",
        ExifEntry.TAG_ORIENTATION: "image_orientation",
        ExifEntry.TAG_EXPOSURE: "camera_exposure",
        ExifEntry.TAG_FOCAL: "camera_focal",
        ExifEntry.TAG_BRIGHTNESS: "camera_brightness",
        ExifEntry.TAG_APERTURE: "camera_aperture",

        # Generic metadatas
        ExifEntry.TAG_IMG_TITLE: "title",
        ExifEntry.TAG_SOFTWARE: "producer",
        ExifEntry.TAG_FILE_TIMESTAMP: "creation_date",
        ExifEntry.TAG_WIDTH: "width",
        ExifEntry.TAG_HEIGHT: "height",
        ExifEntry.TAG_USER_COMMENT: "comment",
    }

    # IPTC record number -> metadata attribute name
    IPTC_KEY = {
         80: "author",
         90: "city",
        101: "country",
        116: "copyright",
        120: "title",
        231: "comment",
    }

    # EXIF orientation tag value -> human readable (translated) description
    orientation_name = {
        1: _('Horizontal (normal)'),
        2: _('Mirrored horizontal'),
        3: _('Rotated 180'),
        4: _('Mirrored vertical'),
        5: _('Mirrored horizontal then rotated 90 counter-clock-wise'),
        6: _('Rotated 90 clock-wise'),
        7: _('Mirrored horizontal then rotated 90 clock-wise'),
        8: _('Rotated 90 counter clock-wise'),
    }

    def extract(self, jpeg):
        if "start_frame/content" in jpeg:
            self.startOfFrame(jpeg["start_frame/content"])
        elif "start_scan/content/nr_components" in jpeg:
            self.bits_per_pixel = 8 * jpeg["start_scan/content/nr_components"].value
        if "app0/content" in jpeg:
            self.extractAPP0(jpeg["app0/content"])

        if "exif/content" in jpeg:
            for ifd in jpeg.array("exif/content/ifd"):
                for entry in ifd.array("entry"):
                    self.processIfdEntry(ifd, entry)
                self.readGPS(ifd)
        if "photoshop/content" in jpeg:
            psd = jpeg["photoshop/content"]
            if "version/content/reader_name" in psd:
                self.producer = psd["version/content/reader_name"].value
            if "iptc/content" in psd:
                self.parseIPTC(psd["iptc/content"])
        for field in jpeg.array("comment"):
            if "content/comment" in field:
                self.comment = field["content/comment"].value
        self.computeQuality(jpeg)
        if "data" in jpeg:
            computeComprRate(self, jpeg["data"].size)
        if not self.has("producer") and "photoshop" in jpeg:
            self.producer = u"Adobe Photoshop"
        # BUGFIX: only fall back to the generic "JPEG" label when no
        # compression was recorded (startOfFrame() stores a more precise
        # one); the previous condition was inverted and appended a
        # redundant "JPEG" value whenever compression was already known.
        if not self.has("compression"):
            self.compression = "JPEG"

    @fault_tolerant
    def startOfFrame(self, sof):
        """Read size, depth and pixel format from the start-of-frame chunk."""
        # Set compression method
        key = sof["../type"].value
        self.compression = "JPEG (%s)" % JpegChunk.START_OF_FRAME[key]

        # Read image size and bits/pixel
        self.width = sof["width"].value
        self.height = sof["height"].value
        nb_components = sof["nr_components"].value
        self.bits_per_pixel = 8 * nb_components
        if nb_components == 3:
            self.pixel_format = _("YCbCr")
        elif nb_components == 1:
            self.pixel_format = _("Grayscale")
            self.nb_colors = 256

    @fault_tolerant
    def computeQuality(self, jpeg):
        # This function is an adaption to Python of ImageMagick code
        # to compute JPEG quality using quantization tables

        # Read quantization tables
        qtlist = []
        for dqt in jpeg.array("quantization"):
            for qt in dqt.array("content/qt"):
                # TODO: Take care of qt["index"].value?
                qtlist.append(qt)
        if not qtlist:
            return

        # Compute sum of all coefficients
        sumcoeff = 0
        for qt in qtlist:
            coeff = qt.array("coeff")
            for index in xrange(64):
                sumcoeff += coeff[index].value

        # Choose the right quality table and compute hash value
        try:
            hashval = qtlist[0]["coeff[2]"].value + qtlist[0]["coeff[53]"].value
            if 2 <= len(qtlist):
                # Two tables: color image (luminance + chrominance)
                hashval += qtlist[1]["coeff[0]"].value + qtlist[1]["coeff[63]"].value
                hashtable = QUALITY_HASH_COLOR
                sumtable = QUALITY_SUM_COLOR
            else:
                hashtable = QUALITY_HASH_GRAY
                sumtable = QUALITY_SUM_GRAY
        except (MissingField, IndexError):
            # A coefficient is missing, so don't compute JPEG quality
            return

        # Find the JPEG quality
        for index in xrange(100):
            if (hashval >= hashtable[index]) or (sumcoeff >= sumtable[index]):
                quality = "%s%%" % (index + 1)
                if (hashval > hashtable[index]) or (sumcoeff > sumtable[index]):
                    quality += " " + _("(approximate)")
                self.comment = "JPEG quality: %s" % quality
                return

    @fault_tolerant
    def extractAPP0(self, app0):
        """Read JFIF version and pixel density from the APP0 chunk."""
        self.format_version = u"JFIF %u.%02u" \
            % (app0["ver_maj"].value, app0["ver_min"].value)
        if "y_density" in app0:
            self.width_dpi = app0["x_density"].value
            self.height_dpi = app0["y_density"].value

    @fault_tolerant
    def processIfdEntry(self, ifd, entry):
        """Convert one EXIF IFD entry into a metadata attribute."""
        # Skip unknown tags
        tag = entry["tag"].value
        if tag not in self.EXIF_KEY:
            return
        key = self.EXIF_KEY[tag]
        if key in ("width", "height") and self.has(key):
            # EXIF "valid size" are sometimes not updated when the image is scaled
            # so we just ignore it
            return

        # Read value
        if "value" in entry:
            value = entry["value"].value
        else:
            value = ifd["value_%s" % entry.name].value

        # Convert value to string
        if tag == ExifEntry.TAG_ORIENTATION:
            value = self.orientation_name.get(value, value)
        elif tag == ExifEntry.TAG_EXPOSURE:
            if not value:
                return
            if isinstance(value, float):
                # Display exposure as a fraction of a second, eg. "1/200"
                value = (value, u"1/%g" % (1/value))
        elif entry["type"].value in (ExifEntry.TYPE_RATIONAL, ExifEntry.TYPE_SIGNED_RATIONAL):
            value = (value, u"%.3g" % value)

        # Store information
        setattr(self, key, value)

    @fault_tolerant
    def readGPS(self, ifd):
        """Read GPS position, altitude and timestamp from an EXIF GPS IFD."""
        # Read latitude and longitude
        latitude_ref = None
        longitude_ref = None
        latitude = None
        longitude = None
        altitude_ref = 1
        altitude = None
        timestamp = None
        datestamp = None
        for entry in ifd.array("entry"):
            tag = entry["tag"].value
            if tag == ExifEntry.TAG_GPS_LATITUDE_REF:
                if entry["value"].value == "N":
                    latitude_ref = 1
                else:
                    latitude_ref = -1
            elif tag == ExifEntry.TAG_GPS_LONGITUDE_REF:
                if entry["value"].value == "E":
                    longitude_ref = 1
                else:
                    longitude_ref = -1
            elif tag == ExifEntry.TAG_GPS_ALTITUDE_REF:
                # Reference value 1 means "below sea level"
                if entry["value"].value == 1:
                    altitude_ref = -1
                else:
                    altitude_ref = 1
            elif tag == ExifEntry.TAG_GPS_LATITUDE:
                latitude = [ifd["value_%s[%u]" % (entry.name, index)].value for index in xrange(3)]
            elif tag == ExifEntry.TAG_GPS_LONGITUDE:
                longitude = [ifd["value_%s[%u]" % (entry.name, index)].value for index in xrange(3)]
            elif tag == ExifEntry.TAG_GPS_ALTITUDE:
                altitude = ifd["value_%s" % entry.name].value
            elif tag == ExifEntry.TAG_GPS_DATESTAMP:
                datestamp = ifd["value_%s" % entry.name].value
            elif tag == ExifEntry.TAG_GPS_TIMESTAMP:
                items = [ifd["value_%s[%u]" % (entry.name, index)].value for index in xrange(3)]
                items = map(int, items)
                items = map(str, items)
                timestamp = ":".join(items)
        if latitude_ref and latitude:
            value = deg2float(*latitude)
            if latitude_ref < 0:
                value = -value
            self.latitude = value
        if longitude and longitude_ref:
            value = deg2float(*longitude)
            if longitude_ref < 0:
                value = -value
            self.longitude = value
        if altitude:
            value = altitude
            if altitude_ref < 0:
                value = -value
            self.altitude = value
        if datestamp:
            if timestamp:
                datestamp += " " + timestamp
            self.creation_date = datestamp

    def parseIPTC(self, iptc):
        """Read author/city/country/... and the creation date from IPTC records."""
        datestr = hourstr = None
        for field in iptc:
            # Skip incomplete field
            if "tag" not in field or "content" not in field:
                continue

            # Get value
            value = field["content"].value
            if isinstance(value, (str, unicode)):
                value = value.replace("\r", " ")
                value = value.replace("\n", " ")

            # Skip unknown tag
            tag = field["tag"].value
            if tag == 55:
                datestr = value
                continue
            if tag == 60:
                hourstr = value
                continue
            if tag not in self.IPTC_KEY:
                if tag != 0:
                    self.warning("Skip IPTC key %s: %s" % (
                        field["tag"].display, makeUnicode(value)))
                continue
            setattr(self, self.IPTC_KEY[tag], value)
        if datestr and hourstr:
            # Date is "YYYYMMDD", hour is "HHMMSS"
            try:
                year = int(datestr[0:4])
                month = int(datestr[4:6])
                day = int(datestr[6:8])
                hour = int(hourstr[0:2])
                min = int(hourstr[2:4])
                sec = int(hourstr[4:6])
                self.creation_date = datetime(year, month, day, hour, min, sec)
            except ValueError:
                # Malformed date/hour strings: skip the creation date
                pass
+
+registerExtractor(JpegFile, JpegMetadata)
+
diff --git a/lib/hachoir_metadata/metadata.py b/lib/hachoir_metadata/metadata.py
new file mode 100644
index 0000000000000000000000000000000000000000..37461c9d55986fdaea68b7d35d7f1a093e87dc00
--- /dev/null
+++ b/lib/hachoir_metadata/metadata.py
@@ -0,0 +1,291 @@
+# -*- coding: utf-8 -*-
+from hachoir_core.compatibility import any, sorted
+from hachoir_core.endian import endian_name
+from hachoir_core.tools import makePrintable, makeUnicode
+from hachoir_core.dict import Dict
+from hachoir_core.error import error, HACHOIR_ERRORS
+from hachoir_core.i18n import _
+from hachoir_core.log import Logger
+from hachoir_metadata.metadata_item import (
+    MIN_PRIORITY, MAX_PRIORITY, QUALITY_NORMAL)
+from hachoir_metadata.register import registerAllItems
+
# Registry filled by registerExtractor(): parser class -> extractor class
extractors = {}
+
class Metadata(Logger):
    """
    Container of metadata items.

    Attribute assignment is overloaded: ``meta.key = value`` appends the
    value to the Data item registered under "key" (duplicates skipped);
    reading is done through get()/getText()/getValues().
    """
    # Default title used by exportPlaintext(); instances override it with setHeader()
    header = u"Metadata"

    def __init__(self, parent, quality=QUALITY_NORMAL):
        assert isinstance(self.header, unicode)

        # Limit to 0.0 .. 1.0
        if parent:
            # A child metadata inherits the quality of its parent
            quality = parent.quality
        else:
            quality = min(max(0.0, quality), 1.0)

        # object.__setattr__ is used because __setattr__ is overloaded below
        object.__init__(self)
        object.__setattr__(self, "_Metadata__data", {})
        object.__setattr__(self, "quality", quality)
        header = self.__class__.header
        object.__setattr__(self, "_Metadata__header", header)

        registerAllItems(self)

    def _logger(self):
        # Logger hook: no prefix for metadata log messages
        pass

    def __setattr__(self, key, value):
        """
        Add a new value to data with name 'key'. Skip duplicates.
        """
        # Invalid key?
        if key not in self.__data:
            raise KeyError(_("%s has no metadata '%s'") % (self.__class__.__name__, key))

        # Skip duplicates
        self.__data[key].add(value)

    def setHeader(self, text):
        # Per-instance override of the class-level header
        object.__setattr__(self, "header", text)

    def getItems(self, key):
        # Return the Data item registered under 'key' (raises ValueError if unknown)
        try:
            return self.__data[key]
        except LookupError:
            raise ValueError("Metadata has no value '%s'" % key)

    def getItem(self, key, index):
        # Return one stored item, or None when the index does not exist
        try:
            return self.getItems(key)[index]
        except (LookupError, ValueError):
            return None

    def has(self, key):
        # True when at least one value is stored for 'key'
        return 1 <= len(self.getItems(key))

    def get(self, key, default=None, index=0):
        """
        Read first value of tag with name 'key'.

        >>> from datetime import timedelta
        >>> a = RootMetadata()
        >>> a.duration = timedelta(seconds=2300)
        >>> a.get('duration')
        datetime.timedelta(0, 2300)
        >>> a.get('author', u'Anonymous')
        u'Anonymous'
        """
        item = self.getItem(key, index)
        if item is None:
            # Without a default, a missing value is an error
            if default is None:
                raise ValueError("Metadata has no value '%s' (index %s)" % (key, index))
            else:
                return default
        return item.value

    def getValues(self, key):
        # Return the list of all raw values stored for 'key'
        try:
            data = self.__data[key]
        except LookupError:
            raise ValueError("Metadata has no value '%s'" % key)
        return [ item.value for item in data ]

    def getText(self, key, default=None, index=0):
        """
        Read first value, as unicode string, of tag with name 'key'.

        >>> from datetime import timedelta
        >>> a = RootMetadata()
        >>> a.duration = timedelta(seconds=2300)
        >>> a.getText('duration')
        u'38 min 20 sec'
        >>> a.getText('titre', u'Unknown')
        u'Unknown'
        """
        item = self.getItem(key, index)
        if item is not None:
            return item.text
        else:
            return default

    def register(self, data):
        # Called by registerAllItems() to declare each supported metadata key
        assert data.key not in self.__data
        data.metadata = self
        self.__data[data.key] = data

    def __iter__(self):
        # Iterate over the Data items (not the keys)
        return self.__data.itervalues()

    def __str__(self):
        r"""
        Create a multi-line ASCII string (end of line is "\n") which
        represents all datas.

        >>> a = RootMetadata()
        >>> a.author = "haypo"
        >>> a.copyright = unicode("© Hachoir", "UTF-8")
        >>> print a
        Metadata:
        - Author: haypo
        - Copyright: \xa9 Hachoir

        @see __unicode__() and exportPlaintext()
        """
        text = self.exportPlaintext()
        return "\n".join( makePrintable(line, "ASCII") for line in text )

    def __unicode__(self):
        r"""
        Create a multi-line Unicode string (end of line is "\n") which
        represents all datas.

        >>> a = RootMetadata()
        >>> a.copyright = unicode("© Hachoir", "UTF-8")
        >>> print repr(unicode(a))
        u'Metadata:\n- Copyright: \xa9 Hachoir'

        @see __str__() and exportPlaintext()
        """
        return "\n".join(self.exportPlaintext())

    def exportPlaintext(self, priority=None, human=True, line_prefix=u"- ", title=None):
        r"""
        Convert metadata to multi-line Unicode string and skip datas
        with priority lower than specified priority.

        Default priority is Metadata.MAX_PRIORITY. If human flag is True, data
        key are translated to better human name (eg. "bit_rate" becomes
        "Bit rate") which may be translated using gettext.

        If priority is too small, metadata are empty and so None is returned.

        >>> print RootMetadata().exportPlaintext()
        None
        >>> meta = RootMetadata()
        >>> meta.copyright = unicode("© Hachoir", "UTF-8")
        >>> print repr(meta.exportPlaintext())
        [u'Metadata:', u'- Copyright: \xa9 Hachoir']

        @see __str__() and __unicode__()
        """
        # Clamp the requested priority into [MIN_PRIORITY, MAX_PRIORITY]
        if priority is not None:
            priority = max(priority, MIN_PRIORITY)
            priority = min(priority, MAX_PRIORITY)
        else:
            priority = MAX_PRIORITY
        if not title:
            title = self.header
        text = ["%s:" % title]
        for data in sorted(self):
            # presumably sorted() orders items by priority -- stop at the
            # first item above the requested threshold
            if priority < data.priority:
                break
            if not data.values:
                continue
            if human:
                title = data.description
            else:
                title = data.key
            for item in data.values:
                if human:
                    value = item.text
                else:
                    value = makeUnicode(item.value)
                text.append("%s%s: %s" % (line_prefix, title, value))
        # Only the title line means there was nothing to export
        if 1 < len(text):
            return text
        else:
            return None

    def __nonzero__(self):
        # Metadata is true when any item holds at least one value
        return any(item for item in self.__data.itervalues())
+
class RootMetadata(Metadata):
    """Root of a metadata tree: a Metadata without parent, owning the quality."""
    def __init__(self, quality=QUALITY_NORMAL):
        Metadata.__init__(self, None, quality)
+
class MultipleMetadata(RootMetadata):
    """Metadata of a container document: common values plus named sub-groups."""
    header = _("Common")
    def __init__(self, quality=QUALITY_NORMAL):
        RootMetadata.__init__(self, quality)
        # Bypass the overloaded __setattr__ for internal attributes
        object.__setattr__(self, "_MultipleMetadata__groups", Dict())
        object.__setattr__(self, "_MultipleMetadata__key_counter", {})

    def __contains__(self, key):
        return key in self.__groups

    def __getitem__(self, key):
        return self.__groups[key]

    def iterGroups(self):
        return self.__groups.itervalues()

    def __nonzero__(self):
        # True when the common part or any group holds a value
        if RootMetadata.__nonzero__(self):
            return True
        return any(bool(group) for group in self.__groups)

    def addGroup(self, key, metadata, header=None):
        """
        Add a new group (metadata of a sub-document).

        Returns False if the group is skipped, True if it has been added.
        """
        if not metadata:
            self.warning("Skip empty group %s" % key)
            return False
        # A key ending with "[]" asks for automatic numbering: key[1], key[2], ...
        if key.endswith("[]"):
            key = key[:-2]
            if key in self.__key_counter:
                self.__key_counter[key] += 1
            else:
                self.__key_counter[key] = 1
            key += "[%u]" % self.__key_counter[key]
        if header:
            metadata.setHeader(header)
        self.__groups.append(key, metadata)
        return True

    def exportPlaintext(self, priority=None, human=True, line_prefix=u"- "):
        # Export the common metadata first, then each group in insertion order
        common = Metadata.exportPlaintext(self, priority, human, line_prefix)
        if common:
            text = common
        else:
            text = []
        for key, metadata in self.__groups.iteritems():
            if not human:
                title = key
            else:
                title = None
            value = metadata.exportPlaintext(priority, human, line_prefix, title=title)
            if value:
                text.extend(value)
        if len(text):
            return text
        else:
            return None
+
def registerExtractor(parser, extractor):
    """Associate a parser class with its metadata extractor class (one per parser)."""
    assert parser not in extractors
    assert issubclass(extractor, RootMetadata)
    extractors[parser] = extractor
+
def extractMetadata(parser, quality=QUALITY_NORMAL):
    """
    Create a Metadata class from a parser. Returns None if no metadata
    extractor does exist for the parser class.
    """
    try:
        extractor = extractors[parser.__class__]
    except KeyError:
        return None
    metadata = extractor(quality)
    try:
        metadata.extract(parser)
    except HACHOIR_ERRORS, err:
        # Extraction errors are logged, not raised: partial metadata is returned
        error("Error during metadata extraction: %s" % unicode(err))
    if metadata:
        # Always record the container MIME type and endianness
        metadata.mime_type = parser.mime_type
        metadata.endian = endian_name[parser.endian]
    return metadata
+
diff --git a/lib/hachoir_metadata/metadata_item.py b/lib/hachoir_metadata/metadata_item.py
new file mode 100644
index 0000000000000000000000000000000000000000..bddd3b07425b89385225356ca55cc1bf14f3a7bc
--- /dev/null
+++ b/lib/hachoir_metadata/metadata_item.py
@@ -0,0 +1,146 @@
+from hachoir_core.tools import makeUnicode, normalizeNewline
+from hachoir_core.error import HACHOIR_ERRORS
+from hachoir_metadata import config
+from hachoir_metadata.setter import normalizeString
+
# Valid range for Data.priority: lower values are displayed first.
MIN_PRIORITY = 100
MAX_PRIORITY = 999

# Extraction quality levels passed to extractMetadata(): higher values
# trade processing time for more complete metadata.
QUALITY_FASTEST = 0.0
QUALITY_FAST = 0.25
QUALITY_NORMAL = 0.5
QUALITY_GOOD = 0.75
QUALITY_BEST = 1.0
+
class DataValue:
    """One stored metadata value: the raw value plus its unicode text
    representation (as computed by Data._createItem)."""
    def __init__(self, value, text):
        self.value = value   # raw value (any type accepted by the Data entry)
        self.text = text     # unicode human-readable representation
+
class Data:
    """A single metadata entry: a key holding zero or more validated values.

    Values are stored as DataValue objects (raw value + unicode text).
    Data objects compare by priority (see __cmp__), which controls
    display order.
    """
    def __init__(self, key, priority, description,
    text_handler=None, type=None, filter=None, conversion=None):
        """
        Create a metadata entry.

        key: attribute name (eg. "author")
        priority: sort key in MIN_PRIORITY..MAX_PRIORITY (lower shown first)
        description: human readable label (must be unicode)
        text_handler: formats a non-string value to unicode, prototype:
           def handler(value) -> str/unicode
        type: accepted value type(s); a single type is wrapped in a tuple
        filter: predicate rejecting out-of-range values
        conversion: converts a raw value before type checking, prototype:
           def conversion(metadata, key, value) -> value or (value, text)
        """
        assert MIN_PRIORITY <= priority <= MAX_PRIORITY
        assert isinstance(description, unicode)
        self.metadata = None   # back-reference, set by the owning Metadata object
        self.key = key
        self.description = description
        self.values = []
        if type and not isinstance(type, (tuple, list)):
            type = (type,)
        self.type = type
        self.text_handler = text_handler
        self.filter = filter
        self.priority = priority
        self.conversion = conversion

    def _createItem(self, value, text=None):
        # Wrap a value in a DataValue, computing its unicode text if not given.
        if text is None:
            if isinstance(value, unicode):
                text = value
            elif self.text_handler:
                text = self.text_handler(value)
                assert isinstance(text, unicode)
            else:
                text = makeUnicode(value)
        return DataValue(value, text)

    def add(self, value):
        """Validate and store a new value; invalid values are skipped
        silently (with a warning on the owning metadata object).

        Accepts either a raw value or a (value, text) tuple.
        """
        if isinstance(value, tuple):
            if len(value) != 2:
                raise ValueError("Data.add() only accept tuple of 2 elements: (value,text)")
            value, text = value
        else:
            text = None

        # Skip value 'None'
        if value is None:
            return

        # Strip control characters and whitespace; skip if nothing remains
        if isinstance(value, (str, unicode)):
            value = normalizeString(value)
            if not value:
                return

        # Apply the conversion callback (if any); otherwise decode byte
        # strings using charset ISO-8859-1 (see elif branch below)
        if self.conversion:
            try:
                new_value = self.conversion(self.metadata, self.key, value)
            except HACHOIR_ERRORS, err:
                self.metadata.warning("Error during conversion of %r value: %s" % (
                    self.key, err))
                return
            if new_value is None:
                dest_types = " or ".join(str(item.__name__) for item in self.type)
                self.metadata.warning("Unable to convert %s=%r (%s) to %s" % (
                    self.key, value, type(value).__name__, dest_types))
                return
            if isinstance(new_value, tuple):
                # Conversion may return (value, text); an explicit text
                # argument takes precedence over the converted text
                if text:
                    value = new_value[0]
                else:
                    value, text = new_value
            else:
                value = new_value
        elif isinstance(value, str):
            value = unicode(value, "ISO-8859-1")

        # Reject values whose type doesn't match the declared type(s)
        if self.type and not isinstance(value, self.type):
            dest_types = " or ".join(str(item.__name__) for item in self.type)
            self.metadata.warning("Key %r: value %r type (%s) is not %s" % (
                self.key, value, type(value).__name__, dest_types))
            return

        # Normalize newlines and truncate overly long strings
        if isinstance(value, unicode):
            value = normalizeNewline(value)
            if config.MAX_STR_LENGTH \
            and config.MAX_STR_LENGTH < len(value):
                value = value[:config.MAX_STR_LENGTH] + "(...)"

        # Skip duplicates
        if value in self:
            return

        # Use filter
        if self.filter and not self.filter(value):
            self.metadata.warning("Skip value %s=%r (filter)" % (self.key, value))
            return

        # For string, if you have "verlongtext" and "verylo",
        # keep the longer value
        if isinstance(value, unicode):
            for index, item in enumerate(self.values):
                item = item.value
                if not isinstance(item, unicode):
                    continue
                if value.startswith(item):
                    # Find longer value, replace the old one
                    self.values[index] = self._createItem(value, text)
                    return
                if item.startswith(value):
                    # Find truncated value, skip it
                    return

        # Add new value
        self.values.append(self._createItem(value, text))

    def __len__(self):
        # Number of stored values
        return len(self.values)

    def __getitem__(self, index):
        return self.values[index]

    def __contains__(self, value):
        # Membership compares raw values, not their text representation
        for item in self.values:
            if value == item.value:
                return True
        return False

    def __cmp__(self, other):
        # Python 2 rich ordering: Data entries sort by display priority
        return cmp(self.priority, other.priority)
+
diff --git a/lib/hachoir_metadata/misc.py b/lib/hachoir_metadata/misc.py
new file mode 100644
index 0000000000000000000000000000000000000000..04c70a6e7047118ae81ffc8c67aac88eabe84885
--- /dev/null
+++ b/lib/hachoir_metadata/misc.py
@@ -0,0 +1,262 @@
+from hachoir_metadata.metadata import RootMetadata, registerExtractor
+from hachoir_metadata.safe import fault_tolerant
+from hachoir_parser.container import SwfFile
+from hachoir_parser.misc import TorrentFile, TrueTypeFontFile, OLE2_File, PcfFile
+from hachoir_core.field import isString
+from hachoir_core.error import warning
+from hachoir_parser import guessParser
+from hachoir_metadata.setter import normalizeString
+
class TorrentMetadata(RootMetadata):
    """Extract metadata from BitTorrent metainfo (.torrent) files."""
    # Root dictionary keys mapped to metadata attributes
    KEY_TO_ATTR = {
        u"announce": "url",
        u"comment": "comment",
        u"creation_date": "creation_date",
    }
    # "info" dictionary keys mapped to metadata attributes
    INFO_TO_ATTR = {
        u"length": "file_size",
        u"name": "filename",
    }

    def extract(self, torrent):
        # torrent[0] is the root bencoded dictionary
        for field in torrent[0]:
            self.processRoot(field)

    @fault_tolerant
    def processRoot(self, field):
        """Process one entry of the root dictionary."""
        if field.name in self.KEY_TO_ATTR:
            key = self.KEY_TO_ATTR[field.name]
            value = field.value
            setattr(self, key, value)
        elif field.name == "info" and "value" in field:
            # Fix: use a distinct name for the inner loop variable instead
            # of shadowing the "field" parameter
            for info_field in field["value"]:
                self.processInfo(info_field)

    @fault_tolerant
    def processInfo(self, field):
        """Process one entry of the "info" dictionary."""
        if field.name in self.INFO_TO_ATTR:
            key = self.INFO_TO_ATTR[field.name]
            value = field.value
            setattr(self, key, value)
        elif field.name == "piece_length":
            self.comment = "Piece length: %s" % field.display
+
class TTF_Metadata(RootMetadata):
    """Extract metadata from TrueType font (TTF) files."""
    # TTF "name" table nameID values mapped to metadata attributes
    NAMEID_TO_ATTR = {
        0: "copyright",   # Copyright notice
        3: "title",       # Unique font identifier
        5: "version",     # Version string
        8: "author",      # Manufacturer name
        11: "url",        # URL Vendor
        14: "copyright",  # License info URL
    }

    def extract(self, ttf):
        if "header" in ttf:
            self.extractHeader(ttf["header"])
        if "names" in ttf:
            self.extractNames(ttf["names"])

    @fault_tolerant
    def extractHeader(self, header):
        """Read dates and rendering hints from the font header table."""
        self.creation_date = header["created"].value
        self.last_modification = header["modified"].value
        # Metadata keys keep multiple values, so both comments are stored
        self.comment = u"Smallest readable size in pixels: %s pixels" % header["lowest"].value
        self.comment = u"Font direction: %s" % header["font_dir"].display

    @fault_tolerant
    def extractNames(self, names):
        """Walk the "name" table records and map known name IDs to attributes."""
        offset = names["offset"].value
        for header in names.array("header"):
            key = header["nameID"].value
            foffset = offset + header["offset"].value
            # getFieldByAddress() takes an address in bits, hence *8
            field = names.getFieldByAddress(foffset*8)
            if not field or not isString(field):
                continue
            value = field.value
            if key not in self.NAMEID_TO_ATTR:
                continue
            key = self.NAMEID_TO_ATTR[key]
            if key == "version" and value.startswith(u"Version "):
                # "Version 1.2" => "1.2"
                value = value[8:]
            setattr(self, key, value)
+
class OLE2_Metadata(RootMetadata):
    """Extract metadata from OLE2 compound documents (MS Office files).

    Reads the SummaryInformation and DocumentSummaryInformation property
    streams, recursing into the OLE2 "mini stream" when needed.
    """
    # SummaryInformation property IDs mapped to metadata attributes
    SUMMARY_ID_TO_ATTR = {
         2: "title",     # Title
         3: "title",     # Subject
         4: "author",
         6: "comment",
         8: "author",    # Last saved by
        12: "creation_date",
        13: "last_modification",
        14: "nb_page",
        18: "producer",
    }
    # SummaryInformation property IDs deliberately skipped
    IGNORE_SUMMARY = set((
        1, # Code page
    ))

    # DocumentSummaryInformation property IDs mapped to metadata attributes
    DOC_SUMMARY_ID_TO_ATTR = {
         3: "title",     # Subject
        14: "author",    # Manager
    }
    # DocumentSummaryInformation property IDs deliberately skipped
    IGNORE_DOC_SUMMARY = set((
        1, # Code page
    ))

    def extract(self, ole2):
        self._extract(ole2)

    def _extract(self, fieldset, main_document=True):
        """Extract from a document; main_document=False means we are
        inside the mini stream (see useRoot)."""
        if main_document:
            # _feedAll() is needed to make sure that we get all root[*] fragments
            fieldset._feedAll()
            if "root[0]" in fieldset:
                self.useRoot(fieldset["root[0]"])
        doc_summary = self.getField(fieldset, main_document, "doc_summary[0]")
        if doc_summary:
            self.useSummary(doc_summary, True)
        word_doc = self.getField(fieldset, main_document, "word_doc[0]")
        if word_doc:
            self.useWordDocument(word_doc)
        summary = self.getField(fieldset, main_document, "summary[0]")
        if summary:
            self.useSummary(summary, False)

    @fault_tolerant
    def useRoot(self, root):
        """Parse the OLE2 mini stream and recurse into it."""
        stream = root.getSubIStream()
        ministream = guessParser(stream)
        if not ministream:
            warning("Unable to create the OLE2 mini stream parser!")
            return
        self._extract(ministream, main_document=False)

    def getField(self, fieldset, main_document, name):
        """Return the named stream as a parsed field, or None.

        In the main document the stream content must itself be parsed;
        in the mini stream the field can be used directly.
        """
        if name not in fieldset:
            return None
        # _feedAll() is needed to make sure that we get all fragments
        # eg. summary[0], summary[1], ..., summary[n]
        fieldset._feedAll()
        field = fieldset[name]
        if main_document:
            stream = field.getSubIStream()
            field = guessParser(stream)
            if not field:
                warning("Unable to create the OLE2 parser for %s!" % name)
                return None
        return field

    @fault_tolerant
    def useSummary(self, summary, is_doc_summary):
        """Read all properties of a (Doc)SummaryInformation stream."""
        if "os" in summary:
            self.os = summary["os"].display
        if "section[0]" not in summary:
            return
        summary = summary["section[0]"]
        for property in summary.array("property_index"):
            self.useProperty(summary, property, is_doc_summary)

    @fault_tolerant
    def useWordDocument(self, doc):
        self.comment = "Encrypted: %s" % doc["fEncrypted"].value

    @fault_tolerant
    def useProperty(self, summary, property, is_doc_summary):
        """Map a single property (index entry) onto a metadata attribute."""
        # Property offsets are in bytes, getFieldByAddress() wants bits
        field = summary.getFieldByAddress(property["offset"].value*8)
        if not field \
        or "value" not in field:
            return
        field = field["value"]
        if not field.hasValue():
            return

        # Get value
        value = field.value
        if isinstance(value, (str, unicode)):
            value = normalizeString(value)
            if not value:
                return

        # Get property identifier
        prop_id = property["id"].value
        if is_doc_summary:
            id_to_attr = self.DOC_SUMMARY_ID_TO_ATTR
            ignore = self.IGNORE_DOC_SUMMARY
        else:
            id_to_attr = self.SUMMARY_ID_TO_ATTR
            ignore = self.IGNORE_SUMMARY
        if prop_id in ignore:
            return

        # Get Hachoir metadata key; unknown properties become comments
        try:
            key = id_to_attr[prop_id]
            use_prefix = False
        except LookupError:
            key = "comment"
            use_prefix = True
        if use_prefix:
            prefix = property["id"].display
            if (prefix in ("TotalEditingTime", "LastPrinted")) \
            and (not field):
                # Ignore null time delta
                return
            value = "%s: %s" % (prefix, value)
        else:
            if (key == "last_modification") and (not field):
                # Ignore null timestamp
                return
        setattr(self, key, value)
+
class PcfMetadata(RootMetadata):
    """Extract metadata from PCF (X11 bitmap font) files."""
    # PCF property names mapped to metadata attributes
    PROP_TO_KEY = {
        'CHARSET_REGISTRY': 'charset',
        'COPYRIGHT': 'copyright',
        'WEIGHT_NAME': 'font_weight',
        'FOUNDRY': 'author',
        'FONT': 'title',
        '_XMBDFED_INFO': 'producer',
    }

    def extract(self, pcf):
        if "properties" in pcf:
            self.useProperties(pcf["properties"])

    def useProperties(self, properties):
        """Resolve each property's name/value strings and store known ones."""
        # String offsets are relative to the end of total_str_length
        last = properties["total_str_length"]
        base_address = last.address + last.size
        for prop in properties.array("property"):
            # Fetch the value string first, then the name string
            value_field = properties.getFieldByAddress(base_address + prop["value_offset"].value*8)
            if not value_field:
                continue
            prop_value = value_field.value
            if not prop_value:
                continue
            name_field = properties.getFieldByAddress(base_address + prop["name_offset"].value*8)
            if not name_field:
                continue
            prop_name = name_field.value
            if prop_name not in self.PROP_TO_KEY:
                warning("Skip %s=%r" % (prop_name, prop_value))
                continue
            setattr(self, self.PROP_TO_KEY[prop_name], prop_value)
+
class SwfMetadata(RootMetadata):
    """Extract metadata from Macromedia/Adobe Flash (SWF) files."""
    def extract(self, swf):
        # Stage bounds are expressed in twips (1/20th of a pixel)
        self.height = swf["rect/ymax"].value
        self.width = swf["rect/xmax"].value
        version = swf["version"].value
        self.format_version = "flash version %s" % version
        self.frame_rate = swf["frame_rate"].value
        frame_count = swf["frame_count"].value
        self.comment = "Frame count: %s" % frame_count
+
+registerExtractor(TorrentFile, TorrentMetadata)
+registerExtractor(TrueTypeFontFile, TTF_Metadata)
+registerExtractor(OLE2_File, OLE2_Metadata)
+registerExtractor(PcfFile, PcfMetadata)
+registerExtractor(SwfFile, SwfMetadata)
+
diff --git a/lib/hachoir_metadata/program.py b/lib/hachoir_metadata/program.py
new file mode 100644
index 0000000000000000000000000000000000000000..a524cee62db8e902d8cda9b5dd0259bb0b61e531
--- /dev/null
+++ b/lib/hachoir_metadata/program.py
@@ -0,0 +1,100 @@
+from hachoir_metadata.metadata import RootMetadata, registerExtractor
+from hachoir_parser.program import ExeFile
+from hachoir_metadata.safe import fault_tolerant, getValue
+
class ExeMetadata(RootMetadata):
    """Extract metadata from Windows executables (PE and NE formats)."""
    # Version-info string keys mapped to metadata attributes
    KEY_TO_ATTR = {
        u"ProductName": "title",
        u"LegalCopyright": "copyright",
        u"LegalTrademarks": "copyright",
        u"LegalTrademarks1": "copyright",
        u"LegalTrademarks2": "copyright",
        u"CompanyName": "author",
        u"BuildDate": "creation_date",
        u"FileDescription": "title",
        u"ProductVersion": "version",
    }
    # Version-info keys deliberately ignored (not useful as metadata)
    SKIP_KEY = set((u"InternalName", u"OriginalFilename", u"FileVersion", u"BuildVersion"))

    def extract(self, exe):
        """Dispatch on the executable format: PE (Win32+) or NE (Win16)."""
        if exe.isPE():
            self.extractPE(exe)
        elif exe.isNE():
            self.extractNE(exe)

    def extractNE(self, exe):
        if "ne_header" in exe:
            self.useNE_Header(exe["ne_header"])
        if "info" in exe:
            self.useNEInfo(exe["info"])

    @fault_tolerant
    def useNEInfo(self, info):
        """Find the StringFileInfo node of an NE resource tree."""
        for node in info.array("node"):
            if node["name"].value == "StringFileInfo":
                self.readVersionInfo(node["node[0]"])

    def extractPE(self, exe):
        # Read information from headers
        if "pe_header" in exe:
            self.usePE_Header(exe["pe_header"])
        if "pe_opt_header" in exe:
            self.usePE_OptHeader(exe["pe_opt_header"])

        # Use PE resource
        resource = exe.getResource()
        if resource and "version_info/node[0]" in resource:
            for node in resource.array("version_info/node[0]/node"):
                if getValue(node, "name") == "StringFileInfo" \
                and "node[0]" in node:
                    self.readVersionInfo(node["node[0]"])

    @fault_tolerant
    def useNE_Header(self, hdr):
        """Describe the NE executable flavour in format_version."""
        if hdr["is_dll"].value:
            self.format_version = u"New-style executable: Dynamic-link library (DLL)"
        elif hdr["is_win_app"].value:
            self.format_version = u"New-style executable: Windows 3.x application"
        else:
            self.format_version = u"New-style executable for Windows 3.x"

    @fault_tolerant
    def usePE_Header(self, hdr):
        """Read creation date, CPU and DLL flag from the PE header."""
        self.creation_date = hdr["creation_date"].value
        self.comment = "CPU: %s" % hdr["cpu"].display
        if hdr["is_dll"].value:
            self.format_version = u"Portable Executable: Dynamic-link library (DLL)"
        else:
            self.format_version = u"Portable Executable: Windows application"

    @fault_tolerant
    def usePE_OptHeader(self, hdr):
        self.comment = "Subsystem: %s" % hdr["subsystem"].display

    def readVersionInfo(self, info):
        """Collect version-info key/value pairs and map them to attributes."""
        values = {}
        for node in info.array("node"):
            if "value" not in node or "name" not in node:
                continue
            # Values are NUL-padded strings
            value = node["value"].value.strip(" \0")
            if not value:
                continue
            key = node["name"].value
            values[key] = value

        if "ProductName" in values and "FileDescription" in values:
            # Make sure that FileDescription is set before ProductName
            # as title value
            self.title = values["FileDescription"]
            self.title = values["ProductName"]
            del values["FileDescription"]
            del values["ProductName"]

        for key, value in values.iteritems():
            if key in self.KEY_TO_ATTR:
                setattr(self, self.KEY_TO_ATTR[key], value)
            elif key not in self.SKIP_KEY:
                # Unknown keys are kept as "key=value" comments
                self.comment = "%s=%s" % (key, value)
+
+registerExtractor(ExeFile, ExeMetadata)
+
diff --git a/lib/hachoir_metadata/qt/__init__.py b/lib/hachoir_metadata/qt/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/lib/hachoir_metadata/qt/dialog.ui b/lib/hachoir_metadata/qt/dialog.ui
new file mode 100644
index 0000000000000000000000000000000000000000..498a8daeffe097e20762c0a6b49f1cd7f3d394ca
--- /dev/null
+++ b/lib/hachoir_metadata/qt/dialog.ui
@@ -0,0 +1,64 @@
+<ui version="4.0" >
+ <class>Form</class>
+ <widget class="QWidget" name="Form" >
+  <property name="geometry" >
+   <rect>
+    <x>0</x>
+    <y>0</y>
+    <width>441</width>
+    <height>412</height>
+   </rect>
+  </property>
+  <property name="windowTitle" >
+   <string>hachoir-metadata</string>
+  </property>
+  <layout class="QVBoxLayout" name="verticalLayout" >
+   <item>
+    <layout class="QHBoxLayout" name="horizontalLayout_2" >
+     <item>
+      <widget class="QPushButton" name="open_button" >
+       <property name="text" >
+        <string>Open</string>
+       </property>
+      </widget>
+     </item>
+     <item>
+      <widget class="QComboBox" name="files_combo" >
+       <property name="sizePolicy" >
+        <sizepolicy vsizetype="Fixed" hsizetype="Expanding" >
+         <horstretch>0</horstretch>
+         <verstretch>0</verstretch>
+        </sizepolicy>
+       </property>
+      </widget>
+     </item>
+    </layout>
+   </item>
+   <item>
+    <widget class="QTableWidget" name="metadata_table" >
+     <property name="alternatingRowColors" >
+      <bool>true</bool>
+     </property>
+     <property name="showGrid" >
+      <bool>false</bool>
+     </property>
+     <property name="rowCount" >
+      <number>0</number>
+     </property>
+     <property name="columnCount" >
+      <number>0</number>
+     </property>
+    </widget>
+   </item>
+   <item>
+    <widget class="QPushButton" name="quit_button" >
+     <property name="text" >
+      <string>Quit</string>
+     </property>
+    </widget>
+   </item>
+  </layout>
+ </widget>
+ <resources/>
+ <connections/>
+</ui>
diff --git a/lib/hachoir_metadata/qt/dialog_ui.py b/lib/hachoir_metadata/qt/dialog_ui.py
new file mode 100644
index 0000000000000000000000000000000000000000..970257cfd9e2583e9fa5668444aef5db020fc6d2
--- /dev/null
+++ b/lib/hachoir_metadata/qt/dialog_ui.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+
+# Form implementation generated from reading ui file 'hachoir_metadata/qt/dialog.ui'
+#
+# Created: Mon Jul 26 03:10:06 2010
+#      by: PyQt4 UI code generator 4.7.3
+#
+# WARNING! All changes made in this file will be lost!
+
+from PyQt4 import QtCore, QtGui
+
class Ui_Form(object):
    """UI builder generated by pyuic4 from hachoir_metadata/qt/dialog.ui.

    NOTE(review): this is pyuic4 output; prefer regenerating from the
    .ui file over further hand edits.
    """
    def setupUi(self, Form):
        """Create the dialog widgets and layouts on *Form*."""
        Form.setObjectName("Form")
        Form.resize(441, 412)
        self.verticalLayout = QtGui.QVBoxLayout(Form)
        self.verticalLayout.setObjectName("verticalLayout")
        self.horizontalLayout_2 = QtGui.QHBoxLayout()
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self.open_button = QtGui.QPushButton(Form)
        self.open_button.setObjectName("open_button")
        self.horizontalLayout_2.addWidget(self.open_button)
        self.files_combo = QtGui.QComboBox(Form)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.files_combo.sizePolicy().hasHeightForWidth())
        self.files_combo.setSizePolicy(sizePolicy)
        self.files_combo.setObjectName("files_combo")
        self.horizontalLayout_2.addWidget(self.files_combo)
        self.verticalLayout.addLayout(self.horizontalLayout_2)
        self.metadata_table = QtGui.QTableWidget(Form)
        self.metadata_table.setAlternatingRowColors(True)
        self.metadata_table.setShowGrid(False)
        # pyuic4 emitted these two calls twice; once is enough
        self.metadata_table.setRowCount(0)
        self.metadata_table.setColumnCount(0)
        self.metadata_table.setObjectName("metadata_table")
        self.verticalLayout.addWidget(self.metadata_table)
        self.quit_button = QtGui.QPushButton(Form)
        self.quit_button.setObjectName("quit_button")
        self.verticalLayout.addWidget(self.quit_button)

        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        """Set translatable texts: window title and button labels."""
        Form.setWindowTitle(QtGui.QApplication.translate("Form", "hachoir-metadata", None, QtGui.QApplication.UnicodeUTF8))
        self.open_button.setText(QtGui.QApplication.translate("Form", "Open", None, QtGui.QApplication.UnicodeUTF8))
        self.quit_button.setText(QtGui.QApplication.translate("Form", "Quit", None, QtGui.QApplication.UnicodeUTF8))
+
diff --git a/lib/hachoir_metadata/register.py b/lib/hachoir_metadata/register.py
new file mode 100644
index 0000000000000000000000000000000000000000..3cbde86dd86987f78b357e6fb87b69fe144aa2b2
--- /dev/null
+++ b/lib/hachoir_metadata/register.py
@@ -0,0 +1,112 @@
+from hachoir_core.i18n import _
+from hachoir_core.tools import (
+    humanDuration, humanBitRate,
+    humanFrequency, humanBitSize, humanFilesize,
+    humanDatetime)
+from hachoir_core.language import Language
+from hachoir_metadata.filter import Filter, NumberFilter, DATETIME_FILTER
+from datetime import date, datetime, timedelta
+from hachoir_metadata.formatter import (
+    humanAudioChannel, humanFrameRate, humanComprRate, humanAltitude,
+    humanPixelSize, humanDPI)
+from hachoir_metadata.setter import (
+    setDatetime, setTrackNumber, setTrackTotal, setLanguage)
+from hachoir_metadata.metadata_item import Data
+
# Sanity bounds used by the NumberFilter instances in registerAllItems():
# values outside these ranges are treated as corrupted and rejected.
MIN_SAMPLE_RATE = 1000              # 1 kHz
MAX_SAMPLE_RATE = 192000            # 192 kHz
MAX_NB_CHANNEL = 8                  # 8 channels
MAX_WIDTH = 20000                   # 20 000 pixels
MAX_BIT_RATE = 500 * 1024 * 1024    # 500 Mbit/s
MAX_HEIGHT = MAX_WIDTH
MAX_DPI_WIDTH = 10000
MAX_DPI_HEIGHT = MAX_DPI_WIDTH
MAX_NB_COLOR = 2 ** 24              # 16 million of color
MAX_BITS_PER_PIXEL = 256            # 256 bits/pixel
MAX_FRAME_RATE = 150                # 150 frame/sec
MAX_NB_PAGE = 20000
MAX_COMPR_RATE = 1000.0
MIN_COMPR_RATE = 0.001
MAX_TRACK = 999

# Accept durations between 1 millisecond and 1 year.
DURATION_FILTER = Filter(timedelta,
    timedelta(milliseconds=1),
    timedelta(days=365))
+
def registerAllItems(meta):
    """Register every known metadata key on *meta*.

    The second argument of each Data() is the display priority (lower is
    shown first); the remaining keyword arguments declare type checking,
    value filtering, conversion and text formatting for the key.
    """
    meta.register(Data("title", 100, _("Title"), type=unicode))
    meta.register(Data("artist", 101, _("Artist"), type=unicode))
    meta.register(Data("author", 102, _("Author"), type=unicode))
    meta.register(Data("music_composer", 103, _("Music composer"), type=unicode))

    meta.register(Data("album", 200, _("Album"), type=unicode))
    meta.register(Data("duration", 201, _("Duration"), # integer in milliseconde
        type=timedelta, text_handler=humanDuration, filter=DURATION_FILTER))
    meta.register(Data("nb_page", 202, _("Nb page"), filter=NumberFilter(1, MAX_NB_PAGE)))
    meta.register(Data("music_genre", 203, _("Music genre"), type=unicode))
    meta.register(Data("language", 204, _("Language"), conversion=setLanguage, type=Language))
    meta.register(Data("track_number", 205, _("Track number"), conversion=setTrackNumber,
        filter=NumberFilter(1, MAX_TRACK), type=(int, long)))
    meta.register(Data("track_total", 206, _("Track total"), conversion=setTrackTotal,
        filter=NumberFilter(1, MAX_TRACK), type=(int, long)))
    meta.register(Data("organization", 210, _("Organization"), type=unicode))
    meta.register(Data("version", 220, _("Version")))


    meta.register(Data("width", 301, _("Image width"), filter=NumberFilter(1, MAX_WIDTH), type=(int, long), text_handler=humanPixelSize))
    meta.register(Data("height", 302, _("Image height"), filter=NumberFilter(1, MAX_HEIGHT), type=(int, long), text_handler=humanPixelSize))
    meta.register(Data("nb_channel", 303, _("Channel"), text_handler=humanAudioChannel, filter=NumberFilter(1, MAX_NB_CHANNEL), type=(int, long)))
    meta.register(Data("sample_rate", 304, _("Sample rate"), text_handler=humanFrequency, filter=NumberFilter(MIN_SAMPLE_RATE, MAX_SAMPLE_RATE), type=(int, long, float)))
    meta.register(Data("bits_per_sample", 305, _("Bits/sample"), text_handler=humanBitSize, filter=NumberFilter(1, 64), type=(int, long)))
    meta.register(Data("image_orientation", 306, _("Image orientation")))
    meta.register(Data("nb_colors", 307, _("Number of colors"), filter=NumberFilter(1, MAX_NB_COLOR), type=(int, long)))
    meta.register(Data("bits_per_pixel", 308, _("Bits/pixel"), filter=NumberFilter(1, MAX_BITS_PER_PIXEL), type=(int, long)))
    meta.register(Data("filename", 309, _("File name"), type=unicode))
    meta.register(Data("file_size", 310, _("File size"), text_handler=humanFilesize, type=(int, long)))
    meta.register(Data("pixel_format", 311, _("Pixel format")))
    meta.register(Data("compr_size", 312, _("Compressed file size"), text_handler=humanFilesize, type=(int, long)))
    meta.register(Data("compr_rate", 313, _("Compression rate"), text_handler=humanComprRate, filter=NumberFilter(MIN_COMPR_RATE, MAX_COMPR_RATE), type=(int, long, float)))

    meta.register(Data("width_dpi", 320, _("Image DPI width"), filter=NumberFilter(1, MAX_DPI_WIDTH), type=(int, long), text_handler=humanDPI))
    meta.register(Data("height_dpi", 321, _("Image DPI height"), filter=NumberFilter(1, MAX_DPI_HEIGHT), type=(int, long), text_handler=humanDPI))

    meta.register(Data("file_attr", 400, _("File attributes")))
    meta.register(Data("file_type", 401, _("File type")))
    meta.register(Data("subtitle_author", 402, _("Subtitle author"), type=unicode))

    meta.register(Data("creation_date", 500, _("Creation date"), text_handler=humanDatetime,
        filter=DATETIME_FILTER, type=(datetime, date), conversion=setDatetime))
    meta.register(Data("last_modification", 501, _("Last modification"), text_handler=humanDatetime,
        filter=DATETIME_FILTER, type=(datetime, date), conversion=setDatetime))
    meta.register(Data("latitude", 510, _("Latitude"), type=float))
    meta.register(Data("longitude", 511, _("Longitude"), type=float))
    # NOTE(review): priority 511 duplicates "longitude" above — confirm
    # whether "altitude" should use a distinct priority (eg. 512)
    meta.register(Data("altitude", 511, _("Altitude"), type=float, text_handler=humanAltitude))
    meta.register(Data("location", 530, _("Location"), type=unicode))
    meta.register(Data("city", 531, _("City"), type=unicode))
    meta.register(Data("country", 532, _("Country"), type=unicode))
    meta.register(Data("charset", 540, _("Charset"), type=unicode))
    meta.register(Data("font_weight", 550, _("Font weight")))

    # NOTE(review): priorities 530-532 below collide with location/city/
    # country above — verify intended display ordering
    meta.register(Data("camera_aperture", 520, _("Camera aperture")))
    meta.register(Data("camera_focal", 521, _("Camera focal")))
    meta.register(Data("camera_exposure", 522, _("Camera exposure")))
    meta.register(Data("camera_brightness", 530, _("Camera brightness")))
    meta.register(Data("camera_model", 531, _("Camera model"), type=unicode))
    meta.register(Data("camera_manufacturer", 532, _("Camera manufacturer"), type=unicode))

    meta.register(Data("compression", 600, _("Compression")))
    meta.register(Data("copyright", 601, _("Copyright"), type=unicode))
    meta.register(Data("url", 602, _("URL"), type=unicode))
    meta.register(Data("frame_rate", 603, _("Frame rate"), text_handler=humanFrameRate,
        filter=NumberFilter(1, MAX_FRAME_RATE), type=(int, long, float)))
    meta.register(Data("bit_rate", 604, _("Bit rate"), text_handler=humanBitRate,
        filter=NumberFilter(1, MAX_BIT_RATE), type=(int, long, float)))
    # NOTE(review): priority 604 duplicates "bit_rate" above
    meta.register(Data("aspect_ratio", 604, _("Aspect ratio"), type=(int, long, float)))

    meta.register(Data("os", 900, _("OS"), type=unicode))
    meta.register(Data("producer", 901, _("Producer"), type=unicode))
    meta.register(Data("comment", 902, _("Comment"), type=unicode))
    meta.register(Data("format_version", 950, _("Format version"), type=unicode))
    meta.register(Data("mime_type", 951, _("MIME type"), type=unicode))
    meta.register(Data("endian", 952, _("Endianness"), type=unicode))
+
diff --git a/lib/hachoir_metadata/riff.py b/lib/hachoir_metadata/riff.py
new file mode 100644
index 0000000000000000000000000000000000000000..adcc0bd9030eeb189459fcd33b09d1aac04f8a38
--- /dev/null
+++ b/lib/hachoir_metadata/riff.py
@@ -0,0 +1,190 @@
+"""
+Extract metadata from RIFF file format: AVI video and WAV sound.
+"""
+
+from hachoir_metadata.metadata import Metadata, MultipleMetadata, registerExtractor
+from hachoir_metadata.safe import fault_tolerant, getValue
+from hachoir_parser.container.riff import RiffFile
+from hachoir_parser.video.fourcc import UNCOMPRESSED_AUDIO
+from hachoir_core.tools import humanFilesize, makeUnicode, timedelta2seconds
+from hachoir_core.i18n import _
+from hachoir_metadata.audio import computeComprRate as computeAudioComprRate
+from datetime import timedelta
+
class RiffMetadata(MultipleMetadata):
    """
    Extract metadata from RIFF containers: WAV audio, AVI video and
    ANI/ACON animated cursors. Per-stream data (AVI) is stored in
    sub-groups; file-wide data goes on the root metadata.
    """

    # Map RIFF INFO chunk tags to Metadata attribute names.
    TAG_TO_KEY = {
        "INAM": "title",
        "IART": "artist",
        "ICMT": "comment",
        "ICOP": "copyright",
        "IENG": "author",    # (engineer)
        "ISFT": "producer",
        "ICRD": "creation_date",
        "IDIT": "creation_date",
    }

    def extract(self, riff):
        # Dispatch on the RIFF form type ("WAVE", "AVI ", "ACON").
        type = riff["type"].value
        if type == "WAVE":
            self.extractWAVE(riff)
            size = getValue(riff, "audio_data/size")
            if size:
                # size is in bytes; the audio helper expects bits.
                computeAudioComprRate(self, size*8)
        elif type == "AVI ":
            if "headers" in riff:
                self.extractAVI(riff["headers"])
                self.extractInfo(riff["headers"])
        elif type == "ACON":
            self.extractAnim(riff)
        # INFO list may appear in any RIFF form type.
        if "info" in riff:
            self.extractInfo(riff["info"])

    def processChunk(self, chunk):
        """Store one INFO chunk value using TAG_TO_KEY; warn on unknown tags."""
        if "text" not in chunk:
            return
        value = chunk["text"].value
        tag = chunk["tag"].value
        if tag not in self.TAG_TO_KEY:
            self.warning("Skip RIFF metadata %s: %s" % (tag, value))
            return
        key = self.TAG_TO_KEY[tag]
        setattr(self, key, value)

    @fault_tolerant
    def extractWAVE(self, wav):
        """Read channel/sample/rate info from the WAV "format" chunk."""
        format = wav["format"]

        # Number of channel, bits/sample, sample rate
        self.nb_channel = format["nb_channel"].value
        self.bits_per_sample = format["bit_per_sample"].value
        self.sample_rate = format["sample_per_sec"].value

        self.compression = format["codec"].display
        if "nb_sample/nb_sample" in wav \
        and 0 < format["sample_per_sec"].value:
            self.duration = timedelta(seconds=float(wav["nb_sample/nb_sample"].value) / format["sample_per_sec"].value)
        if format["codec"].value in UNCOMPRESSED_AUDIO:
            # Codec with fixed bit rate
            self.bit_rate = format["nb_channel"].value * format["bit_per_sample"].value * format["sample_per_sec"].value
            if not self.has("duration") \
            and "audio_data/size" in wav \
            and self.has("bit_rate"):
                # Fall back: derive duration from payload size and bit rate.
                duration = float(wav["audio_data/size"].value)*8 / self.get('bit_rate')
                self.duration = timedelta(seconds=duration)

    def extractInfo(self, fieldset):
        """Walk an INFO list, recursing into nested LIST chunks."""
        for field in fieldset:
            if not field.is_field_set:
                continue
            if "tag" in field:
                if field["tag"].value == "LIST":
                    self.extractInfo(field)
                else:
                    self.processChunk(field)

    @fault_tolerant
    def extractAVIVideo(self, header, meta):
        """Fill the video sub-group from an AVI "strh" video stream header."""
        meta.compression = "%s (fourcc:\"%s\")" \
            % (header["fourcc"].display, makeUnicode(header["fourcc"].value))
        if header["rate"].value and header["scale"].value:
            fps = float(header["rate"].value) / header["scale"].value
            meta.frame_rate = fps
            if 0 < fps:
                # Stream duration doubles as the file duration.
                self.duration = meta.duration = timedelta(seconds=float(header["length"].value) / fps)

        if "../stream_fmt/width" in header:
            # Prefer the stream format chunk when present.
            format = header["../stream_fmt"]
            meta.width = format["width"].value
            meta.height = format["height"].value
            meta.bits_per_pixel = format["depth"].value
        else:
            # Otherwise use the destination rectangle of the header.
            meta.width = header["right"].value - header["left"].value
            meta.height = header["bottom"].value - header["top"].value

    @fault_tolerant
    def extractAVIAudio(self, format, meta):
        """Fill the audio sub-group from an AVI audio stream format chunk."""
        meta.nb_channel = format["channel"].value
        meta.sample_rate = format["sample_rate"].value
        # NOTE(review): "bit_rate" field is presumably stored in bytes/sec
        # (hence *8 to get bits/sec) -- confirm against the RIFF parser.
        meta.bit_rate = format["bit_rate"].value * 8
        if format["bits_per_sample"].value:
            meta.bits_per_sample = format["bits_per_sample"].value
        if "../stream_hdr" in format:
            header = format["../stream_hdr"]
            if header["rate"].value and header["scale"].value:
                frame_rate = float(header["rate"].value) / header["scale"].value
                meta.duration = timedelta(seconds=float(header["length"].value) / frame_rate)
            if header["fourcc"].value != "":
                meta.compression = "%s (fourcc:\"%s\")" \
                    % (format["codec"].display, header["fourcc"].value)
        if not meta.has("compression"):
            meta.compression = format["codec"].display

        self.computeAudioComprRate(meta)

    @fault_tolerant
    def computeAudioComprRate(self, meta):
        """
        Store compr_rate = uncompressed bit rate / actual bit rate.
        NOTE(review): the local names are swapped ('uncompr' holds the
        actual/compressed bit rate, 'compr' the uncompressed one); the
        computed ratio itself is correct.
        """
        uncompr = meta.get('bit_rate', 0)
        if not uncompr:
            return
        compr = meta.get('nb_channel') * meta.get('sample_rate') * meta.get('bits_per_sample', default=16)
        if not compr:
            return
        meta.compr_rate = float(compr) / uncompr

    @fault_tolerant
    def useAviHeader(self, header):
        """Fallback values from the main "avih" header (frame rate, size)."""
        microsec = header["microsec_per_frame"].value
        if microsec:
            self.frame_rate = 1000000.0 / microsec
            total_frame = getValue(header, "total_frame")
            if total_frame and not self.has("duration"):
                self.duration = timedelta(microseconds=total_frame * microsec)
        self.width = header["width"].value
        self.height = header["height"].value

    def extractAVI(self, headers):
        """Process all AVI stream headers, then global header and index."""
        audio_index = 1
        for stream in headers.array("stream"):
            if "stream_hdr/stream_type" not in stream:
                continue
            stream_type = stream["stream_hdr/stream_type"].value
            if stream_type == "vids":
                if "stream_hdr" in stream:
                    meta = Metadata(self)
                    self.extractAVIVideo(stream["stream_hdr"], meta)
                    self.addGroup("video", meta, "Video stream")
            elif stream_type == "auds":
                if "stream_fmt" in stream:
                    meta = Metadata(self)
                    self.extractAVIAudio(stream["stream_fmt"], meta)
                    self.addGroup("audio[%u]" % audio_index, meta, "Audio stream")
                    audio_index += 1
        if "avi_hdr" in headers:
            self.useAviHeader(headers["avi_hdr"])

        # Compute global bit rate
        if self.has("duration") and "/movie/size" in headers:
            self.bit_rate = float(headers["/movie/size"].value) * 8 / timedelta2seconds(self.get('duration'))

        # Video has index?
        if "/index" in headers:
            self.comment = _("Has audio/video index (%s)") \
                % humanFilesize(headers["/index"].size/8)

    @fault_tolerant
    def extractAnim(self, riff):
        """Average the per-frame rates of an ANI cursor (jiffies, 1/60 s)."""
        if "anim_rate/rate[0]" in riff:
            count = 0
            total = 0
            # NOTE(review): count is incremented before the 100-entry cap is
            # checked, so after a break count==101 while total holds only 100
            # rates -- the average is slightly off for long animations; confirm.
            for rate in riff.array("anim_rate/rate"):
                count += 1
                if 100 < count:
                    break
                total += rate.value / 60.0
            if count and total:
                self.frame_rate = count / total
        if not self.has("frame_rate") and "anim_hdr/jiffie_rate" in riff:
            self.frame_rate = 60.0 / riff["anim_hdr/jiffie_rate"].value
+
+registerExtractor(RiffFile, RiffMetadata)
+
diff --git a/lib/hachoir_metadata/safe.py b/lib/hachoir_metadata/safe.py
new file mode 100644
index 0000000000000000000000000000000000000000..e1d91abb240d0526d2efd86d5983dee9efef5124
--- /dev/null
+++ b/lib/hachoir_metadata/safe.py
@@ -0,0 +1,27 @@
+from hachoir_core.error import HACHOIR_ERRORS, warning
+
def fault_tolerant(func, *args):
    """
    Decorator: call func() and swallow any HACHOIR_ERRORS, emitting a
    warning instead of propagating the exception. Returns the wrapped
    function's result (None when an error was swallowed).

    The trailing *args parameter is unused; it is kept for backward
    compatibility with the original signature.
    """
    def safe_func(*args, **kw):
        try:
            # Propagate the return value (the original discarded it);
            # decorated extractors that return nothing are unaffected.
            return func(*args, **kw)
        except HACHOIR_ERRORS as err:
            # "as" syntax works on Python 2.6+ and Python 3,
            # unlike the legacy "except X, err" form.
            warning("Error when calling function %s(): %s" % (
                func.__name__, err))
            return None
    # Preserve the wrapped function's identity for logging/debugging.
    safe_func.__name__ = func.__name__
    safe_func.__doc__ = func.__doc__
    return safe_func
+
def getFieldAttribute(fieldset, key, attrname):
    """
    Read attribute attrname (e.g. "value" or "display") of the field
    fieldset[key]. Return None when the field has no value or when a
    Hachoir error occurs while reading it (a warning is emitted).
    """
    try:
        field = fieldset[key]
        if field.hasValue():
            return getattr(field, attrname)
    except HACHOIR_ERRORS as err:
        # "as" syntax: compatible with Python 2.6+ and Python 3,
        # unlike the legacy "except X, err" form.
        warning("Unable to get %s of field %s/%s: %s" % (
            attrname, fieldset.path, key, err))
    return None
+
def getValue(fieldset, key):
    """Safely read fieldset[key].value; None on error (see getFieldAttribute)."""
    return getFieldAttribute(fieldset, key, "value")
+
def getDisplay(fieldset, key):
    """Safely read fieldset[key].display; None on error (see getFieldAttribute)."""
    return getFieldAttribute(fieldset, key, "display")
+
diff --git a/lib/hachoir_metadata/setter.py b/lib/hachoir_metadata/setter.py
new file mode 100644
index 0000000000000000000000000000000000000000..41da41407b80b7773ffa05fd8b1dd5ea23e8bf87
--- /dev/null
+++ b/lib/hachoir_metadata/setter.py
@@ -0,0 +1,171 @@
+from datetime import date, datetime
+import re
+from hachoir_core.language import Language
+from locale import setlocale, LC_ALL
+from time import strptime
+from hachoir_metadata.timezone import createTimezone
+from hachoir_metadata import config
+
# Collapse any run of date/time separators ("-", "/", ".", ":", space)
# into a single "~" before matching the patterns below.
NORMALIZE_REGEX = re.compile("[-/.: ]+")
YEAR_REGEX1 = re.compile("^([0-9]{4})$")

# Date: YYYY-MM-DD (US format), after separator normalization
DATE_REGEX1 = re.compile("^([0-9]{4})~([01][0-9])~([0-9]{2})$")

# Date and time: YYYY-MM-DD HH:MM:SS (US format)
DATETIME_REGEX1 = re.compile("^([0-9]{4})~([01][0-9])~([0-9]{2})~([0-9]{1,2})~([0-9]{2})~([0-9]{2})$")

# Date and time: "MM-DD-YYYY HH:MM:SS" (FR format)
DATETIME_REGEX2 = re.compile("^([01]?[0-9])~([0-9]{2})~([0-9]{4})~([0-9]{1,2})~([0-9]{2})~([0-9]{2})$")

# Timezone suffix: "(...) +0200"
TIMEZONE_REGEX = re.compile("^(.*)~([+-][0-9]{2})00$")

# Timestamp: 'February 2007'
MONTH_YEAR = "%B~%Y"

# Timestamp: 'Sun Feb 24 15:51:09 2008'
RIFF_TIMESTAMP = "%a~%b~%d~%H~%M~%S~%Y"

# Timestamp: 'Thu, 19 Jul 2007 09:03:57'
ISO_TIMESTAMP = "%a,~%d~%b~%Y~%H~%M~%S"

def parseDatetime(value):
    """
    Parse a date/datetime string. Returns a date, a datetime, a
    (date, unicode_text) pair (bare year only), or None on failure.

    Year and date:
    >>> parseDatetime("2000")
    (datetime.date(2000, 1, 1), u'2000')
    >>> parseDatetime("2004-01-02")
    datetime.date(2004, 1, 2)

    Timestamp:
    >>> parseDatetime("2004-01-02 18:10:45")
    datetime.datetime(2004, 1, 2, 18, 10, 45)
    >>> parseDatetime("2004-01-02 18:10:45")
    datetime.datetime(2004, 1, 2, 18, 10, 45)

    Timestamp with timezone:
    >>> parseDatetime(u'Thu, 19 Jul 2007 09:03:57 +0000')
    datetime.datetime(2007, 7, 19, 9, 3, 57, tzinfo=<TimezoneUTC delta=0, name=u'UTC'>)
    >>> parseDatetime(u'Thu, 19 Jul 2007 09:03:57 +0200')
    datetime.datetime(2007, 7, 19, 9, 3, 57, tzinfo=<Timezone delta=2:00:00, name='+0200'>)
    """
    # Normalize all separators to "~" so a single set of patterns applies.
    value = NORMALIZE_REGEX.sub("~", value.strip())

    # Bare year: keep the original text alongside the synthesized date.
    regs = YEAR_REGEX1.match(value)
    if regs:
        try:
            year = int(regs.group(1))
            return (date(year, 1, 1), unicode(year))
        except ValueError:
            pass

    # YYYY-MM-DD
    regs = DATE_REGEX1.match(value)
    if regs:
        try:
            year = int(regs.group(1))
            month = int(regs.group(2))
            day = int(regs.group(3))
            return date(year, month, day)
        except ValueError:
            pass

    # YYYY-MM-DD HH:MM:SS
    regs = DATETIME_REGEX1.match(value)
    if regs:
        try:
            year = int(regs.group(1))
            month = int(regs.group(2))
            day = int(regs.group(3))
            hour = int(regs.group(4))
            min = int(regs.group(5))   # shadows builtin min(); local only
            sec = int(regs.group(6))
            return datetime(year, month, day, hour, min, sec)
        except ValueError:
            pass

    # MM-DD-YYYY HH:MM:SS
    regs = DATETIME_REGEX2.match(value)
    if regs:
        try:
            month = int(regs.group(1))
            day = int(regs.group(2))
            year = int(regs.group(3))
            hour = int(regs.group(4))
            min = int(regs.group(5))
            sec = int(regs.group(6))
            return datetime(year, month, day, hour, min, sec)
        except ValueError:
            pass

    # strptime() formats below use English month/day names, so switch to
    # the C locale while parsing. BUGFIX: setlocale(cat, loc) returns the
    # *new* setting, so the original code saved "C" and never restored the
    # caller's locale; read the current locale first, then switch.
    current_locale = setlocale(LC_ALL)
    setlocale(LC_ALL, "C")
    try:
        # Split off a trailing whole-hour timezone ("+0200") if present.
        match = TIMEZONE_REGEX.match(value)
        if match:
            without_timezone = match.group(1)
            delta = int(match.group(2))
            delta = createTimezone(delta)
        else:
            without_timezone = value
            delta = None
        try:
            timestamp = strptime(without_timezone, ISO_TIMESTAMP)
            arguments = list(timestamp[0:6]) + [0, delta]
            return datetime(*arguments)
        except ValueError:
            pass

        try:
            timestamp = strptime(without_timezone, RIFF_TIMESTAMP)
            arguments = list(timestamp[0:6]) + [0, delta]
            return datetime(*arguments)
        except ValueError:
            pass

        try:
            timestamp = strptime(value, MONTH_YEAR)
            arguments = list(timestamp[0:3])
            return date(*arguments)
        except ValueError:
            pass
    finally:
        # Always restore the caller's locale.
        setlocale(LC_ALL, current_locale)
    return None
+
def setDatetime(meta, key, value):
    """Setter filter for date attributes: parse strings, pass through
    date/datetime objects unchanged, reject everything else."""
    if isinstance(value, (date, datetime)):
        return value
    if isinstance(value, (str, unicode)):
        return parseDatetime(value)
    return None
+
def setLanguage(meta, key, value):
    """
    Setter filter for language attributes: wrap a language code string
    into a Language object.

    >>> setLanguage(None, None, "fre")
    <Language 'French', code='fre'>
    >>> setLanguage(None, None, u"ger")
    <Language 'German', code='ger'>
    """
    return Language(value)
+
def setTrackTotal(meta, key, total):
    """
    Setter filter for the track total: coerce to int, or warn through
    meta and return None when the value is not a valid integer.

    >>> setTrackTotal(None, None, "10")
    10
    """
    try:
        count = int(total)
    except ValueError:
        meta.warning("Invalid track total: %r" % total)
        return None
    return count
+
def setTrackNumber(meta, key, number):
    """Setter filter for the track number; accepts ints or strings,
    including the "track/total" notation (total is stored separately)."""
    # Already numeric: accept as-is (long is Python 2 only).
    if isinstance(number, (int, long)):
        return number
    if "/" in number:
        # "3/12" form: store the total through the track_total setter.
        number, total = number.split("/", 1)
        meta.track_total = total
    try:
        return int(number)
    except ValueError:
        meta.warning("Invalid track number: %r" % number)
        return None
+
def normalizeString(text):
    """Strip surrounding whitespace and NUL bytes, unless raw output
    is requested by the configuration."""
    if not config.RAW_OUTPUT:
        return text.strip(" \t\v\n\r\0")
    return text
+
diff --git a/lib/hachoir_metadata/timezone.py b/lib/hachoir_metadata/timezone.py
new file mode 100644
index 0000000000000000000000000000000000000000..324e56e6031abf71255933294e2e5b95373dfda5
--- /dev/null
+++ b/lib/hachoir_metadata/timezone.py
@@ -0,0 +1,42 @@
+from datetime import tzinfo, timedelta
+
class TimezoneUTC(tzinfo):
    """Concrete tzinfo with a fixed zero offset from UTC and no DST."""

    # Shared zero offset; also reused by the fixed-offset subclass.
    ZERO = timedelta(0)

    def utcoffset(self, dt):
        # Always zero: this class *is* UTC.
        return TimezoneUTC.ZERO

    def dst(self, dt):
        # UTC never observes daylight saving time.
        return TimezoneUTC.ZERO

    def tzname(self, dt):
        return u"UTC"

    def __repr__(self):
        return "<TimezoneUTC delta=0, name=u'UTC'>"

class Timezone(TimezoneUTC):
    """Concrete tzinfo at a fixed offset (in hours) from UTC."""

    def __init__(self, offset):
        # Store the hour offset as a timedelta for utcoffset(),
        # and render the usual "+HH00"/"-HH00" name once.
        minutes = offset * 60
        self._offset = timedelta(minutes=minutes)
        self._name = u"%+03u00" % offset

    def utcoffset(self, dt):
        return self._offset

    def tzname(self, dt):
        return self._name

    def __repr__(self):
        return "<Timezone delta=%s, name='%s'>" % (
            self._offset, self._name)
+
# Module-wide singleton for the zero-offset timezone.
UTC = TimezoneUTC()
+
def createTimezone(offset):
    """Return a tzinfo for the given hour offset: the shared UTC
    singleton when offset is zero/falsy, else a new Timezone."""
    if not offset:
        return UTC
    return Timezone(offset)
+
diff --git a/lib/hachoir_metadata/version.py b/lib/hachoir_metadata/version.py
new file mode 100644
index 0000000000000000000000000000000000000000..03a3bae2d4487ec8218d7924428c53fa2b496675
--- /dev/null
+++ b/lib/hachoir_metadata/version.py
@@ -0,0 +1,5 @@
# Package identity constants, reused by setup scripts and version display.
PACKAGE = "hachoir-metadata"
VERSION = "1.3.3"
WEBSITE = "http://bitbucket.org/haypo/hachoir/wiki/hachoir-metadata"
LICENSE = "GNU GPL v2"
+
diff --git a/lib/hachoir_metadata/video.py b/lib/hachoir_metadata/video.py
new file mode 100644
index 0000000000000000000000000000000000000000..215ef225a667caead77b5abad687ed2b13fdcda7
--- /dev/null
+++ b/lib/hachoir_metadata/video.py
@@ -0,0 +1,412 @@
+from hachoir_core.field import MissingField
+from hachoir_metadata.metadata import (registerExtractor,
+    Metadata, RootMetadata, MultipleMetadata)
+from hachoir_metadata.metadata_item import QUALITY_GOOD
+from hachoir_metadata.safe import fault_tolerant
+from hachoir_parser.video import MovFile, AsfFile, FlvFile
+from hachoir_parser.video.asf import Descriptor as ASF_Descriptor
+from hachoir_parser.container import MkvFile
+from hachoir_parser.container.mkv import dateToDatetime
+from hachoir_core.i18n import _
+from hachoir_core.tools import makeUnicode, makePrintable, timedelta2seconds
+from datetime import timedelta
+
class MkvMetadata(MultipleMetadata):
    """
    Extract metadata from Matroska (MKV) containers: per-track video,
    audio and subtitle sub-groups, plus segment-level info and tags.
    """

    # Map Matroska SimpleTag names to Metadata attribute names.
    tag_key = {
        "TITLE": "title",
        "URL": "url",
        "COPYRIGHT": "copyright",

        # TODO: use maybe another name?
        # Its value may be different than (...)/Info/DateUTC/date
        "DATE_RECORDED": "creation_date",

        # TODO: Extract subtitle metadata
        "SUBTITLE": "subtitle_author",
    }

    def extract(self, mkv):
        for segment in mkv.array("Segment"):
            self.processSegment(segment)

    def processSegment(self, segment):
        """Dispatch each top-level segment element to its handler."""
        for field in segment:
            if field.name.startswith("Info["):
                self.processInfo(field)
            elif field.name.startswith("Tags["):
                for tag in field.array("Tag"):
                    self.processTag(tag)
            elif field.name.startswith("Tracks["):
                self.processTracks(field)
            elif field.name.startswith("Cluster["):
                # Stop at payload clusters unless a deep scan is requested.
                if self.quality < QUALITY_GOOD:
                    return

    def processTracks(self, tracks):
        for entry in tracks.array("TrackEntry"):
            self.processTrack(entry)

    def processTrack(self, track):
        """Route a TrackEntry to the video/audio/subtitle processor."""
        if "TrackType/enum" not in track:
            return
        if track["TrackType/enum"].display == "video":
            self.processVideo(track)
        elif track["TrackType/enum"].display == "audio":
            self.processAudio(track)
        elif track["TrackType/enum"].display == "subtitle":
            self.processSubtitle(track)

    def trackCommon(self, track, meta):
        """Fill name/language shared by all track kinds ("mis"/"und" skipped)."""
        if "Name/unicode" in track:
            meta.title = track["Name/unicode"].value
        if "Language/string" in track \
        and track["Language/string"].value not in ("mis", "und"):
            meta.language = track["Language/string"].value

    def processVideo(self, track):
        video = Metadata(self)
        self.trackCommon(track, video)
        try:
            video.compression = track["CodecID/string"].value
            if "Video" in track:
                video.width = track["Video/PixelWidth/unsigned"].value
                video.height = track["Video/PixelHeight/unsigned"].value
        except MissingField:
            pass
        self.addGroup("video[]", video, "Video stream")

    def getDouble(self, field, parent):
        """Read parent's value stored either as a float or a double child."""
        float_key = '%s/float' % parent
        if float_key in field:
            return field[float_key].value
        double_key = '%s/double' % parent
        if double_key in field:
            return field[double_key].value
        return None

    def processAudio(self, track):
        audio = Metadata(self)
        self.trackCommon(track, audio)
        if "Audio" in track:
            frequency = self.getDouble(track, "Audio/SamplingFrequency")
            if frequency is not None:
                audio.sample_rate = frequency
            if "Audio/Channels/unsigned" in track:
                audio.nb_channel = track["Audio/Channels/unsigned"].value
            if "Audio/BitDepth/unsigned" in track:
                audio.bits_per_sample = track["Audio/BitDepth/unsigned"].value
        if "CodecID/string" in track:
            audio.compression = track["CodecID/string"].value
        self.addGroup("audio[]", audio, "Audio stream")

    def processSubtitle(self, track):
        sub = Metadata(self)
        self.trackCommon(track, sub)
        try:
            sub.compression = track["CodecID/string"].value
        except MissingField:
            pass
        self.addGroup("subtitle[]", sub, "Subtitle")

    def processTag(self, tag):
        for field in tag.array("SimpleTag"):
            self.processSimpleTag(field)

    def processSimpleTag(self, tag):
        """Store one SimpleTag value when its name is in tag_key."""
        if "TagName/unicode" not in tag \
        or "TagString/unicode" not in tag:
            return
        name = tag["TagName/unicode"].value
        if name not in self.tag_key:
            return
        key = self.tag_key[name]
        value = tag["TagString/unicode"].value
        setattr(self, key, value)

    def processInfo(self, info):
        """Segment info: duration, creation date, producer, title."""
        if "TimecodeScale/unsigned" in info:
            duration = self.getDouble(info, "Duration")
            if duration is not None:
                try:
                    # Duration is expressed in TimecodeScale units (ns factor).
                    seconds = duration * info["TimecodeScale/unsigned"].value * 1e-9
                    self.duration = timedelta(seconds=seconds)
                except OverflowError:
                    # Catch OverflowError for timedelta (long int too large
                    # to be converted to an int)
                    pass
        if "DateUTC/date" in info:
            try:
                self.creation_date = dateToDatetime(info["DateUTC/date"].value)
            except OverflowError:
                pass
        # NOTE(review): both writing and muxing apps are stored under
        # "producer"; presumably the Metadata class keeps multiple values
        # per key -- confirm, otherwise MuxingApp overwrites WritingApp.
        if "WritingApp/unicode" in info:
            self.producer = info["WritingApp/unicode"].value
        if "MuxingApp/unicode" in info:
            self.producer = info["MuxingApp/unicode"].value
        if "Title/unicode" in info:
            self.title = info["Title/unicode"].value
+
class FlvMetadata(MultipleMetadata):
    """Extract metadata from Flash video (FLV) files: first video and
    audio chunks plus the onMetaData AMF entries."""

    def extract(self, flv):
        if "video[0]" in flv:
            meta = Metadata(self)
            self.extractVideo(flv["video[0]"], meta)
            self.addGroup("video", meta, "Video stream")
        if "audio[0]" in flv:
            meta = Metadata(self)
            self.extractAudio(flv["audio[0]"], meta)
            self.addGroup("audio", meta, "Audio stream")
        # TODO: Compute duration
        # One technique: use the last video/audio chunk timestamp,
        # but this is very slow
        self.format_version = flv.description

        if "metadata/entry[1]" in flv:
            self.extractAMF(flv["metadata/entry[1]"])
        if self.has('duration'):
            # NOTE(review): flv.size is presumably expressed in bits
            # (hachoir field sizes are bit-based), giving bit/s -- confirm.
            self.bit_rate = flv.size / timedelta2seconds(self.get('duration'))

    @fault_tolerant
    def extractAudio(self, audio, meta):
        """Codec, sample rate, sample size and channel count of a chunk."""
        if audio["codec"].display == "MP3" and "music_data" in audio:
            meta.compression = audio["music_data"].description
        else:
            meta.compression = audio["codec"].display
        meta.sample_rate = audio.getSampleRate()
        if audio["is_16bit"].value:
            meta.bits_per_sample = 16
        else:
            meta.bits_per_sample = 8
        if audio["is_stereo"].value:
            meta.nb_channel = 2
        else:
            meta.nb_channel = 1

    @fault_tolerant
    def extractVideo(self, video, meta):
        meta.compression = video["codec"].display

    def extractAMF(self, amf):
        """Walk the onMetaData AMF array items."""
        for entry in amf.array("item"):
            self.useAmfEntry(entry)

    @fault_tolerant
    def useAmfEntry(self, entry):
        """Map one AMF key/value pair to a metadata attribute."""
        key = entry["key"].value
        if key == "duration":
            self.duration = timedelta(seconds=entry["value"].value)
        elif key == "creator":
            self.producer = entry["value"].value
        elif key == "audiosamplerate":
            self.sample_rate = entry["value"].value
        elif key == "framerate":
            self.frame_rate = entry["value"].value
        elif key == "metadatacreator":
            self.producer = entry["value"].value
        elif key == "metadatadate":
            # NOTE(review): reads entry.value, not entry["value"].value,
            # unlike every other branch -- confirm this is intentional.
            self.creation_date = entry.value
        elif key == "width":
            self.width = int(entry["value"].value)
        elif key == "height":
            self.height = int(entry["value"].value)
+
class MovMetadata(RootMetadata):
    """Extract metadata from QuickTime MOV files (movie and track headers)."""

    def extract(self, mov):
        for atom in mov:
            if "movie" in atom:
                self.processMovie(atom["movie"])

    @fault_tolerant
    def processMovieHeader(self, hdr):
        """Dates, duration and playback settings from the "mvhd" header."""
        self.creation_date = hdr["creation_date"].value
        self.last_modification = hdr["lastmod_date"].value
        # Duration is stored in time_scale units per second.
        self.duration = timedelta(seconds=float(hdr["duration"].value) / hdr["time_scale"].value)
        # NOTE(review): comment is assigned twice; presumably the Metadata
        # class accumulates multiple values per key -- confirm, otherwise
        # the play-speed comment is overwritten by the volume one.
        self.comment = _("Play speed: %.1f%%") % (hdr["play_speed"].value*100)
        self.comment = _("User volume: %.1f%%") % (float(hdr["volume"].value)*100//255)

    @fault_tolerant
    def processTrackHeader(self, hdr):
        """Frame size from a "tkhd" header; zero dimensions are ignored."""
        width = int(hdr["frame_size_width"].value)
        height = int(hdr["frame_size_height"].value)
        if width and height:
            self.width = width
            self.height = height

    def processTrack(self, atom):
        for field in atom:
            if "track_hdr" in field:
                self.processTrackHeader(field["track_hdr"])

    def processMovie(self, atom):
        for field in atom:
            if "track" in field:
                self.processTrack(field["track"])
            if "movie_hdr" in field:
                self.processMovieHeader(field["movie_hdr"])
+
+
class AsfMetadata(MultipleMetadata):
    """Extract metadata from ASF containers (WMV video, WMA audio)."""

    # Map extended-description keys to Metadata attribute names.
    EXT_DESC_TO_ATTR = {
        "Encoder": "producer",
        "ToolName": "producer",
        "AlbumTitle": "album",
        "Track": "track_number",
        "TrackNumber": "track_total",
        "Year": "creation_date",
        "AlbumArtist": "author",
    }
    # Extended-description keys that carry no useful information.
    SKIP_EXT_DESC = set((
        # Not useful information
        "WMFSDKNeeded", "WMFSDKVersion",
        "Buffer Average", "VBR Peak", "EncodingTime",
        "MediaPrimaryClassID", "UniqueFileIdentifier",
    ))

    def extract(self, asf):
        if "header/content" in asf:
            self.processHeader(asf["header/content"])

    def processHeader(self, header):
        """Process the ASF header object: descriptors, file properties,
        codec list, per-stream properties and content description."""
        # NOTE(review): 'compression' is filled from the codec list below
        # but never consumed afterwards (see the commented-out TODO in
        # streamProperty) -- dead accumulation, kept for that TODO.
        compression = []
        is_vbr = None

        if "ext_desc/content" in header:
            # Extract all data from ext_desc
            data = {}
            for desc in header.array("ext_desc/content/descriptor"):
                self.useExtDescItem(desc, data)

            # Have ToolName and ToolVersion? If yes, group them to producer key
            if "ToolName" in data and "ToolVersion" in data:
                self.producer = "%s (version %s)" % (data["ToolName"], data["ToolVersion"])
                del data["ToolName"]
                del data["ToolVersion"]

            # "IsVBR" key
            if "IsVBR" in data:
                is_vbr = (data["IsVBR"] == 1)
                del data["IsVBR"]

            # Store data: known keys go to their attribute, the rest are
            # folded into "key=value" comment entries.
            for key, value in data.iteritems():
                if key in self.EXT_DESC_TO_ATTR:
                    key = self.EXT_DESC_TO_ATTR[key]
                else:
                    if isinstance(key, str):
                        key = makePrintable(key, "ISO-8859-1", to_unicode=True)
                    value = "%s=%s" % (key, value)
                    key = "comment"
                setattr(self, key, value)

        if "file_prop/content" in header:
            self.useFileProp(header["file_prop/content"], is_vbr)

        if "codec_list/content" in header:
            for codec in header.array("codec_list/content/codec"):
                if "name" in codec:
                    text = codec["name"].value
                    if "desc" in codec and codec["desc"].value:
                        text = "%s (%s)" % (text, codec["desc"].value)
                    compression.append(text)

        audio_index = 1
        video_index = 1
        for index, stream_prop in enumerate(header.array("stream_prop")):
            if "content/audio_header" in stream_prop:
                meta = Metadata(self)
                self.streamProperty(header, index, meta)
                self.streamAudioHeader(stream_prop["content/audio_header"], meta)
                if self.addGroup("audio[%u]" % audio_index, meta, "Audio stream #%u" % audio_index):
                    audio_index += 1
            elif "content/video_header" in stream_prop:
                meta = Metadata(self)
                self.streamProperty(header, index, meta)
                self.streamVideoHeader(stream_prop["content/video_header"], meta)
                if self.addGroup("video[%u]" % video_index, meta, "Video stream #%u" % video_index):
                    video_index += 1

        if "metadata/content" in header:
            info = header["metadata/content"]
            try:
                self.title = info["title"].value
                self.author = info["author"].value
                self.copyright = info["copyright"].value
            except MissingField:
                pass

    @fault_tolerant
    def streamAudioHeader(self, audio, meta):
        """Channel/rate/sample-size info from an audio stream header."""
        if not meta.has("compression"):
            meta.compression = audio["twocc"].display
        meta.nb_channel = audio["channels"].value
        meta.sample_rate = audio["sample_rate"].value
        meta.bits_per_sample = audio["bits_per_sample"].value

    @fault_tolerant
    def streamVideoHeader(self, video, meta):
        """Frame size and codec info from a video stream header."""
        meta.width = video["width"].value
        meta.height = video["height"].value
        if "bmp_info" in video:
            bmp_info = video["bmp_info"]
            if not meta.has("compression"):
                meta.compression = bmp_info["codec"].display
            meta.bits_per_pixel = bmp_info["bpp"].value

    @fault_tolerant
    def useExtDescItem(self, desc, data):
        """Collect one extended descriptor into the data dict."""
        if desc["type"].value == ASF_Descriptor.TYPE_BYTE_ARRAY:
            # Skip binary data
            return
        key = desc["name"].value
        if "/" in key:
            # Replace "WM/ToolName" with "ToolName"
            key = key.split("/", 1)[1]
        if key in self.SKIP_EXT_DESC:
            # Skip some keys
            return
        value = desc["value"].value
        if not value:
            return
        value = makeUnicode(value)
        data[key] = value

    @fault_tolerant
    def useFileProp(self, prop, is_vbr):
        """File-wide properties: creation date, duration, max bit rate."""
        self.creation_date = prop["creation_date"].value
        self.duration = prop["play_duration"].value
        if prop["seekable"].value:
            self.comment = u"Is seekable"
        value = prop["max_bitrate"].value
        text = prop["max_bitrate"].display
        # Annotate the bit rate with the VBR/CBR hint from ext_desc.
        if is_vbr is True:
            text = "VBR (%s max)" % text
        elif is_vbr is False:
            text = "%s (CBR)" % text
        else:
            text = "%s (max)" % text
        self.bit_rate = (value, text)

    def streamProperty(self, header, index, meta):
        """Per-stream average bit rate, looked up by stream index."""
        key = "bit_rates/content/bit_rate[%u]/avg_bitrate" % index
        if key in header:
            meta.bit_rate = header[key].value

        # TODO: Use codec list
        # It doesn't work when the video uses /header/content/bitrate_mutex
        # since the codec list are shared between streams but... how is it
        # shared?
#        key = "codec_list/content/codec[%u]" % index
#        if key in header:
#            codec = header[key]
#            if "name" in codec:
#                text = codec["name"].value
#                if "desc" in codec and codec["desc"].value:
#                    meta.compression = "%s (%s)" % (text, codec["desc"].value)
#                else:
#                    meta.compression = text
+
+registerExtractor(MovFile, MovMetadata)
+registerExtractor(AsfFile, AsfMetadata)
+registerExtractor(FlvFile, FlvMetadata)
+registerExtractor(MkvFile, MkvMetadata)
+
diff --git a/lib/hachoir_parser/__init__.py b/lib/hachoir_parser/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1b9860abb7cfff7bd2666b8cf8aeac9d0f237974
--- /dev/null
+++ b/lib/hachoir_parser/__init__.py
@@ -0,0 +1,7 @@
+from hachoir_parser.version import __version__
+from hachoir_parser.parser import ValidateError, HachoirParser, Parser
+from hachoir_parser.parser_list import ParserList, HachoirParserList
+from hachoir_parser.guess import (QueryParser, guessParser, createParser)
+from hachoir_parser import (archive, audio, container,
+    file_system, image, game, misc, network, program, video)
+
diff --git a/lib/hachoir_parser/archive/__init__.py b/lib/hachoir_parser/archive/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..ecd09e8f8b2968459bf6202c6003cb42a1efc5cf
--- /dev/null
+++ b/lib/hachoir_parser/archive/__init__.py
@@ -0,0 +1,12 @@
+from hachoir_parser.archive.ace import AceFile
+from hachoir_parser.archive.ar import ArchiveFile
+from hachoir_parser.archive.bzip2_parser import Bzip2Parser
+from hachoir_parser.archive.cab import CabFile
+from hachoir_parser.archive.gzip_parser import GzipParser
+from hachoir_parser.archive.tar import TarFile
+from hachoir_parser.archive.zip import ZipFile
+from hachoir_parser.archive.rar import RarFile
+from hachoir_parser.archive.rpm import RpmFile
+from hachoir_parser.archive.sevenzip import SevenZipParser
+from hachoir_parser.archive.mar import MarFile
+
diff --git a/lib/hachoir_parser/archive/ace.py b/lib/hachoir_parser/archive/ace.py
new file mode 100644
index 0000000000000000000000000000000000000000..03652920f1878dca539c92c47a511fba84ed56c7
--- /dev/null
+++ b/lib/hachoir_parser/archive/ace.py
@@ -0,0 +1,267 @@
+"""
+ACE parser
+
+From wotsit.org and the SDK header (bitflags)
+
+Partial study of a new block type (5) I've called "new_recovery", as its
+syntax is very close to the former one (of type 2).
+
+Status: can only fully read file and header blocks.
+Author: Christophe Gisquet <christophe.gisquet@free.fr>
+Creation date: 19 january 2006
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (StaticFieldSet, FieldSet,
+    Bit, Bits, NullBits, RawBytes, Enum,
+    UInt8, UInt16, UInt32,
+    PascalString8, PascalString16, String,
+    TimeDateMSDOS32)
+from hachoir_core.text_handler import textHandler, filesizeHandler, hexadecimal
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_parser.common.msdos import MSDOSFileAttr32
+
+# ACE signature string, stored 7 bytes into the marker block.
+MAGIC = "**ACE**"
+
+# Host OS codes whose file attributes follow MS-DOS semantics (see fileHeader).
+OS_MSDOS = 0
+OS_WIN32 = 2
+HOST_OS = {
+    0: "MS-DOS",
+    1: "OS/2",
+    2: "Win32",
+    3: "Unix",
+    4: "MAC-OS",
+    5: "Win NT",
+    6: "Primos",
+    7: "APPLE GS",
+    8: "ATARI",
+    9: "VAX VMS",
+    10: "AMIGA",
+    11: "NEXT",
+}
+
+# Compression algorithm identifiers (file header "compression_type").
+COMPRESSION_TYPE = {
+    0: "Store",
+    1: "Lempel-Ziv 77",
+    2: "ACE v2.0",
+}
+
+# Compression quality level (file header "compression_mode").
+COMPRESSION_MODE = {
+    0: "fastest",
+    1: "fast",
+    2: "normal",
+    3: "good",
+    4: "best",
+}
+
+# TODO: Computing the CRC16 would also prove useful
+#def markerValidate(self):
+#    return not self["extend"].value and self["signature"].value == MAGIC and \
+#           self["host_os"].value<12
+
+class MarkerFlags(StaticFieldSet):
+    # Bit flags of the marker (archive) block. The tuple order is the
+    # on-disk bit order and must not be changed.
+    format = (
+        (Bit, "extend", "Whether the header is extended"),
+        (Bit, "has_comment", "Whether the archive has a comment"),
+        (NullBits, "unused", 7, "Reserved bits"),
+        (Bit, "sfx", "SFX"),
+        (Bit, "limited_dict", "Junior SFX with 256K dictionary"),
+        (Bit, "multi_volume", "Part of a set of ACE archives"),
+        (Bit, "has_av_string", "This header holds an AV-string"),
+        (Bit, "recovery_record", "Recovery record preset"),
+        (Bit, "locked", "Archive is locked"),
+        (Bit, "solid", "Archive uses solid compression")
+    )
+
+def markerFlags(self):
+    # Flag parser hook for block type 0 (see Block.TAG_INFO).
+    yield MarkerFlags(self, "flags", "Marker flags")
+
+def markerHeader(self):
+    # Header parser hook for block type 0. Yield order defines the binary
+    # layout and must not be changed. The flags yielded by markerFlags()
+    # are already available as self["flags"].
+    yield String(self, "signature", 7, "Signature")
+    yield UInt8(self, "ver_extract", "Version needed to extract archive")
+    yield UInt8(self, "ver_created", "Version used to create archive")
+    yield Enum(UInt8(self, "host_os", "OS where the files were compressed"), HOST_OS)
+    yield UInt8(self, "vol_num", "Volume number")
+    yield TimeDateMSDOS32(self, "time", "Date and time (MS DOS format)")
+    yield Bits(self, "reserved", 64, "Reserved size for future extensions")
+    flags = self["flags"]
+    if flags["has_av_string"].value:
+        yield PascalString8(self, "av_string", "AV String")
+    if flags["has_comment"].value:
+        size = filesizeHandler(UInt16(self, "comment_size", "Comment size"))
+        yield size
+        if size.value > 0:
+            yield RawBytes(self, "compressed_comment", size.value, \
+                           "Compressed comment")
+
+class FileFlags(StaticFieldSet):
+    # Bit flags of a file block; tuple order is the on-disk bit order.
+    format = (
+        (Bit, "extend", "Whether the header is extended"),
+        (Bit, "has_comment", "Presence of file comment"),
+        (Bits, "unused", 10, "Unused bit flags"),
+        (Bit, "encrypted", "File encrypted with password"),
+        (Bit, "previous", "File continued from previous volume"),
+        (Bit, "next", "File continues on the next volume"),
+        (Bit, "solid", "File compressed using previously archived files")
+    )
+
+def fileFlags(self):
+    # Flag parser hook for block type 1 (see Block.TAG_INFO).
+    yield FileFlags(self, "flags", "File flags")
+
+def fileHeader(self):
+    yield filesizeHandler(UInt32(self, "compressed_size", "Size of the compressed file"))
+    yield filesizeHandler(UInt32(self, "uncompressed_size", "Uncompressed file size"))
+    yield TimeDateMSDOS32(self, "ftime", "Date and time (MS DOS format)")
+    if self["/header/host_os"].value in (OS_MSDOS, OS_WIN32):
+        yield MSDOSFileAttr32(self, "file_attr", "File attributes")
+    else:
+        yield textHandler(UInt32(self, "file_attr", "File attributes"), hexadecimal)
+    yield textHandler(UInt32(self, "file_crc32", "CRC32 checksum over the compressed file)"), hexadecimal)
+    yield Enum(UInt8(self, "compression_type", "Type of compression"), COMPRESSION_TYPE)
+    yield Enum(UInt8(self, "compression_mode", "Quality of compression"), COMPRESSION_MODE)
+    yield textHandler(UInt16(self, "parameters", "Compression parameters"), hexadecimal)
+    yield textHandler(UInt16(self, "reserved", "Reserved data"), hexadecimal)
+    # Filename
+    yield PascalString16(self, "filename", "Filename")
+    # Comment
+    if self["flags/has_comment"].value:
+        yield filesizeHandler(UInt16(self, "comment_size", "Size of the compressed comment"))
+        if self["comment_size"].value > 0:
+            yield RawBytes(self, "comment_data", self["comment_size"].value, "Comment data")
+
+def fileBody(self):
+    # Body parser hook for block type 1: the compressed payload, if any.
+    size = self["compressed_size"].value
+    if size > 0:
+        yield RawBytes(self, "compressed_data", size, "Compressed data")
+
+def fileDesc(self):
+    return "File entry: %s (%s)" % (self["filename"].value, self["compressed_size"].display)
+
+def recoveryHeader(self):
+    yield filesizeHandler(UInt32(self, "rec_blk_size", "Size of recovery data"))
+    self.body_size = self["rec_blk_size"].size
+    yield String(self, "signature", 7, "Signature, normally '**ACE**'")
+    yield textHandler(UInt32(self, "relative_start",
+         "Relative start (to this block) of the data this block is mode of"),
+         hexadecimal)
+    yield UInt32(self, "num_blocks", "Number of blocks the data is split into")
+    yield UInt32(self, "size_blocks", "Size of these blocks")
+    yield UInt16(self, "crc16_blocks", "CRC16 over recovery data")
+    # size_blocks blocks of size size_blocks follow
+    # The ultimate data is the xor data of all those blocks
+    size = self["size_blocks"].value
+    for index in xrange(self["num_blocks"].value):
+        yield RawBytes(self, "data[]", size, "Recovery block %i" % index)
+    yield RawBytes(self, "xor_data", size, "The XOR value of the above data blocks")
+
+def recoveryDesc(self):
+    return "Recovery block, size=%u" % self["body_size"].display
+
+def newRecoveryHeader(self):
+    """
+    This header is described nowhere
+    """
+    # Parser hook for block type 5 ("new recovery"); layout reverse-engineered.
+    if self["flags/extend"].value:
+        yield filesizeHandler(UInt32(self, "body_size", "Size of the unknown body following"))
+        self.body_size = self["body_size"].value
+    yield textHandler(UInt32(self, "unknown[]", "Unknown field, probably 0"),
+        hexadecimal)
+    yield String(self, "signature", 7, "Signature, normally '**ACE**'")
+    yield textHandler(UInt32(self, "relative_start",
+        "Offset (=crc16's) of this block in the file"), hexadecimal)
+    yield textHandler(UInt32(self, "unknown[]",
+        "Unknown field, probably 0"), hexadecimal)
+
+class BaseFlags(StaticFieldSet):
+    # Default 16-bit flag layout for unknown block types.
+    format = (
+        (Bit, "extend", "Whether the header is extended"),
+        (NullBits, "unused", 15, "Unused bit flags")
+    )
+
+def parseFlags(self):
+    # Default flag parser, used when the block type has no specific one.
+    yield BaseFlags(self, "flags", "Unknown flags")
+
+def parseHeader(self):
+    # Default header parser: only an optional body size when extended.
+    if self["flags/extend"].value:
+        yield filesizeHandler(UInt32(self, "body_size", "Size of the unknown body following"))
+        self.body_size = self["body_size"].value
+
+def parseBody(self):
+    # Default body parser: skip body_size raw bytes.
+    if self.body_size > 0:
+        yield RawBytes(self, "body_data", self.body_size, "Body data, unhandled")
+
+class Block(FieldSet):
+    TAG_INFO = {
+        0: ("header", "Archiver header", markerFlags, markerHeader, None),
+        1: ("file[]", fileDesc, fileFlags, fileHeader, fileBody),
+        2: ("recovery[]", recoveryDesc, recoveryHeader, None, None),
+        5: ("new_recovery[]", None, None, newRecoveryHeader, None)
+    }
+
+    def __init__(self, parent, name, description=None):
+        FieldSet.__init__(self, parent, name, description)
+        self.body_size = 0
+        self.desc_func = None
+        type = self["block_type"].value
+        if type in self.TAG_INFO:
+            self._name, desc, self.parseFlags, self.parseHeader, self.parseBody = self.TAG_INFO[type]
+            if desc:
+                if isinstance(desc, str):
+                    self._description = desc
+                else:
+                    self.desc_func = desc
+        else:
+            self.warning("Processing as unknown block block of type %u" % type)
+        if not self.parseFlags:
+            self.parseFlags = parseFlags
+        if not self.parseHeader:
+            self.parseHeader = parseHeader
+        if not self.parseBody:
+            self.parseBody = parseBody
+
+    def createFields(self):
+        yield textHandler(UInt16(self, "crc16", "Archive CRC16 (from byte 4 on)"), hexadecimal)
+        yield filesizeHandler(UInt16(self, "head_size", "Block size (from byte 4 on)"))
+        yield UInt8(self, "block_type", "Block type")
+
+        # Flags
+        for flag in self.parseFlags(self):
+            yield flag
+
+        # Rest of the header
+        for field in self.parseHeader(self):
+            yield field
+        size = self["head_size"].value - (self.current_size//8) + (2+2)
+        if size > 0:
+            yield RawBytes(self, "extra_data", size, "Extra header data, unhandled")
+
+        # Body in itself
+        for field in self.parseBody(self):
+            yield field
+
+    def createDescription(self):
+        if self.desc_func:
+            return self.desc_func(self)
+        else:
+            return "Block: %s" % self["type"].display
+
+class AceFile(Parser):
+    # Top-level ACE archive parser: a flat sequence of Block fields.
+    endian = LITTLE_ENDIAN
+    PARSER_TAGS = {
+        "id": "ace",
+        "category": "archive",
+        "file_ext": ("ace",),
+        "mime": (u"application/x-ace-compressed",),
+        "min_size": 50*8,
+        "description": "ACE archive"
+    }
+
+    def validate(self):
+        # The "**ACE**" signature sits 7 bytes into the marker block.
+        if self.stream.readBytes(7*8, len(MAGIC)) != MAGIC:
+            return "Invalid magic"
+        return True
+
+    def createFields(self):
+        while not self.eof:
+            yield Block(self, "block[]")
+
diff --git a/lib/hachoir_parser/archive/ar.py b/lib/hachoir_parser/archive/ar.py
new file mode 100644
index 0000000000000000000000000000000000000000..421cdc53c2e16daa30eec9f9aa7bf3275e32fd64
--- /dev/null
+++ b/lib/hachoir_parser/archive/ar.py
@@ -0,0 +1,52 @@
+"""
+GNU ar archive : archive file (.a) and Debian (.deb) archive.
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, ParserError,
+    String, RawBytes, UnixLine)
+from hachoir_core.endian import BIG_ENDIAN
+
+class ArchiveFileEntry(FieldSet):
+    # One member of a Unix "ar" archive: a one-line ASCII header
+    # followed by the raw file data.
+    def createFields(self):
+        yield UnixLine(self, "header", "Header")
+        info = self["header"].value.split()
+        if len(info) != 7:
+            raise ParserError("Invalid file entry header")
+        # info[5] is the decimal file-size field of the header.
+        size = int(info[5])
+        if 0 < size:
+            yield RawBytes(self, "content", size, "File data")
+
+    def createDescription(self):
+        # The first header token is the member filename.
+        return "File entry (%s)" % self["header"].value.split()[0]
+
+class ArchiveFile(Parser):
+    endian = BIG_ENDIAN
+    MAGIC = '!<arch>\n'
+    PARSER_TAGS = {
+        "id": "unix_archive",
+        "category": "archive",
+        "file_ext": ("a", "deb"),
+        "mime":
+            (u"application/x-debian-package",
+             u"application/x-archive",
+             u"application/x-dpkg"),
+        "min_size": (8 + 13)*8, # file signature + smallest file as possible
+        "magic": ((MAGIC, 0),),
+        "description": "Unix archive"
+    }
+
+    def validate(self):
+        if self.stream.readBytes(0, len(self.MAGIC)) != self.MAGIC:
+            return "Invalid magic string"
+        return True
+
+    def createFields(self):
+        yield String(self, "id", 8, "Unix archive identifier (\"<!arch>\")", charset="ASCII")
+        while not self.eof:
+            data = self.stream.readBytes(self.current_size, 1)
+            if data == "\n":
+                yield RawBytes(self, "empty_line[]", 1, "Empty line")
+            else:
+                yield ArchiveFileEntry(self, "file[]", "File")
+
diff --git a/lib/hachoir_parser/archive/bzip2_parser.py b/lib/hachoir_parser/archive/bzip2_parser.py
new file mode 100644
index 0000000000000000000000000000000000000000..bec1d0e1cdaec315c67081bb627e210683400562
--- /dev/null
+++ b/lib/hachoir_parser/archive/bzip2_parser.py
@@ -0,0 +1,85 @@
+"""
+BZIP2 archive file
+
+Author: Victor Stinner
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (ParserError, String,
+    Bytes, Character, UInt8, UInt32, CompressedField)
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal
+
+try:
+    from bz2 import BZ2Decompressor
+
+    class Bunzip2:
+        # Adapter exposing BZ2Decompressor through the callable interface
+        # expected by CompressedField: called as (size, data), returns the
+        # decompressed bytes, or '' once the stream is exhausted.
+        def __init__(self, stream):
+            self.bzip2 = BZ2Decompressor()
+
+        def __call__(self, size, data=''):
+            try:
+                return self.bzip2.decompress(data)
+            except EOFError:
+                return ''
+
+    has_deflate = True
+except ImportError:
+    # Without the bz2 module, only the raw compressed bytes are exposed.
+    has_deflate = False
+
+class Bzip2Parser(Parser):
+    # bzip2 archive parser: fixed header, then one opaque compressed blob.
+    PARSER_TAGS = {
+        "id": "bzip2",
+        "category": "archive",
+        "file_ext": ("bz2",),
+        "mime": (u"application/x-bzip2",),
+        "min_size": 10*8,
+        "magic": (('BZh', 0),),
+        "description": "bzip2 archive"
+    }
+    endian = LITTLE_ENDIAN
+
+    def validate(self):
+        if self.stream.readBytes(0, 3) != 'BZh':
+            return "Wrong file signature"
+        # blocksize is an ASCII digit giving the 100KB block multiplier.
+        if not("1" <= self["blocksize"].value <= "9"):
+            return "Wrong blocksize"
+        return True
+
+    def createFields(self):
+        yield String(self, "id", 3, "Identifier (BZh)", charset="ASCII")
+        yield Character(self, "blocksize", "Block size (KB of memory needed to uncompress)")
+
+        yield UInt8(self, "blockheader", "Block header")
+        if self["blockheader"].value == 0x17:
+            yield String(self, "id2", 4, "Identifier2 (re8P)", charset="ASCII")
+            yield UInt8(self, "id3", "Identifier3 (0x90)")
+        elif self["blockheader"].value == 0x31:
+            yield String(self, "id2", 5, "Identifier 2 (AY&SY)", charset="ASCII")
+            if self["id2"].value != "AY&SY":
+                raise ParserError("Invalid identifier 2 (AY&SY)!")
+        else:
+            raise ParserError("Invalid block header!")
+        yield textHandler(UInt32(self, "crc32", "CRC32"), hexadecimal)
+
+        if self._size is None: # TODO: is it possible to handle piped input?
+            raise NotImplementedError
+
+        # NOTE(review): '/' is integer division under Python 2 here; a
+        # Python 3 port must use '//' to keep 'size' an int.
+        size = (self._size - self.current_size)/8
+        if size:
+            # If the stream carries a ".bz2" filename tag, strip the
+            # extension so the decompressed sub-stream gets the inner name.
+            for tag, filename in self.stream.tags:
+                if tag == "filename" and filename.endswith(".bz2"):
+                    filename = filename[:-4]
+                    break
+            else:
+                filename = None
+            data = Bytes(self, "file", size)
+            if has_deflate:
+                CompressedField(self, Bunzip2)
+                def createInputStream(**args):
+                    if filename:
+                        args.setdefault("tags",[]).append(("filename", filename))
+                    return self._createInputStream(**args)
+                data._createInputStream = createInputStream
+            yield data
+
diff --git a/lib/hachoir_parser/archive/cab.py b/lib/hachoir_parser/archive/cab.py
new file mode 100644
index 0000000000000000000000000000000000000000..856b01eebc46c60479f29e9722f92e88988e5cd6
--- /dev/null
+++ b/lib/hachoir_parser/archive/cab.py
@@ -0,0 +1,125 @@
+"""
+Microsoft Cabinet (CAB) archive.
+
+Author: Victor Stinner
+Creation date: 31 january 2007
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, Enum,
+    CString, String,
+    UInt16, UInt32, Bit, Bits, PaddingBits, NullBits,
+    DateTimeMSDOS32, RawBytes)
+from hachoir_parser.common.msdos import MSDOSFileAttr16
+from hachoir_core.text_handler import textHandler, hexadecimal, filesizeHandler
+from hachoir_core.endian import LITTLE_ENDIAN
+
+# Upper bound on the folder count, used by validate() to reject corrupt files.
+MAX_NB_FOLDER = 30
+
+# Folder compression method codes.
+COMPRESSION_NONE = 0
+COMPRESSION_NAME = {
+    0: "Uncompressed",
+    1: "Deflate",
+    2: "Quantum",
+    3: "LZX",
+}
+
+class Folder(FieldSet):
+    # CAB folder entry: data offset plus compression method/level.
+    def createFields(self):
+        yield UInt32(self, "off_data", "Offset of data")
+        yield UInt16(self, "cf_data")
+        yield Enum(Bits(self, "compr_method", 4, "Compression method"), COMPRESSION_NAME)
+        yield Bits(self, "compr_level", 5, "Compression level")
+        yield PaddingBits(self, "padding", 7)
+
+    def createDescription(self):
+        text= "Folder: compression %s" % self["compr_method"].display
+        # Only mention the level when data is actually compressed.
+        if self["compr_method"].value != COMPRESSION_NONE:
+            text += " (level %u)" % self["compr_level"].value
+        return text
+
+class File(FieldSet):
+    # CAB file entry: size, uncompressed offset, owning folder, timestamp,
+    # attributes and a NUL-terminated filename.
+    def createFields(self):
+        yield filesizeHandler(UInt32(self, "filesize", "Uncompressed file size"))
+        yield UInt32(self, "offset", "File offset after decompression")
+        yield UInt16(self, "iFolder", "file control id")
+        yield DateTimeMSDOS32(self, "timestamp")
+        yield MSDOSFileAttr16(self, "attributes")
+        yield CString(self, "filename", charset="ASCII")
+
+    def createDescription(self):
+        return "File %s (%s)" % (
+            self["filename"].display, self["filesize"].display)
+
+class Reserved(FieldSet):
+    # Length-prefixed reserved area of the CAB header.
+    def createFields(self):
+        yield UInt32(self, "size")
+        size = self["size"].value
+        if size:
+            yield RawBytes(self, "data", size)
+
+class Flags(FieldSet):
+    # CAB header flags (16 bits).
+    static_size = 16
+    def createFields(self):
+        yield Bit(self, "has_previous")
+        yield Bit(self, "has_next")
+        yield Bit(self, "has_reserved")
+        yield NullBits(self, "padding", 13)
+
+class CabFile(Parser):
+    # Microsoft Cabinet parser: fixed header, folder table, file table,
+    # then the (unparsed) compressed data.
+    endian = LITTLE_ENDIAN
+    MAGIC = "MSCF"
+    PARSER_TAGS = {
+        "id": "cab",
+        "category": "archive",
+        "file_ext": ("cab",),
+        "mime": (u"application/vnd.ms-cab-compressed",),
+        "magic": ((MAGIC, 0),),
+        "min_size": 1*8, # header + file entry
+        "description": "Microsoft Cabinet archive"
+    }
+
+    def validate(self):
+        if self.stream.readBytes(0, 4) != self.MAGIC:
+            return "Invalid magic"
+        # Only cabinet format 1.3 is handled.
+        if self["cab_version"].value != 0x0103:
+            return "Unknown version (%s)" % self["cab_version"].display
+        if not (1 <= self["nb_folder"].value <= MAX_NB_FOLDER):
+            return "Invalid number of folder (%s)" % self["nb_folder"].value
+        return True
+
+    def createFields(self):
+        yield String(self, "magic", 4, "Magic (MSCF)", charset="ASCII")
+        yield textHandler(UInt32(self, "hdr_checksum", "Header checksum (0 if not used)"), hexadecimal)
+        yield filesizeHandler(UInt32(self, "filesize", "Cabinet file size"))
+        yield textHandler(UInt32(self, "fld_checksum", "Folders checksum (0 if not used)"), hexadecimal)
+        yield UInt32(self, "off_file", "Offset of first file")
+        yield textHandler(UInt32(self, "files_checksum", "Files checksum (0 if not used)"), hexadecimal)
+        yield textHandler(UInt16(self, "cab_version", "Cabinet version"), hexadecimal)
+        yield UInt16(self, "nb_folder", "Number of folders")
+        yield UInt16(self, "nb_files", "Number of files")
+        yield Flags(self, "flags")
+        yield UInt16(self, "setid")
+        yield UInt16(self, "number", "Zero-based cabinet number")
+
+        # --- TODO: Support flags
+        if self["flags/has_reserved"].value:
+            yield Reserved(self, "reserved")
+        #(3) Previous cabinet name, if CAB_HEADER.flags & CAB_FLAG_HASPREV
+        #(4) Previous disk name, if CAB_HEADER.flags & CAB_FLAG_HASPREV
+        #(5) Next cabinet name, if CAB_HEADER.flags & CAB_FLAG_HASNEXT
+        #(6) Next disk name, if CAB_HEADER.flags & CAB_FLAG_HASNEXT
+        # ----
+
+        for index in xrange(self["nb_folder"].value):
+            yield Folder(self, "folder[]")
+        for index in xrange(self["nb_files"].value):
+            yield File(self, "file[]")
+
+        # Remaining bytes (folder data) are left unparsed.
+        end = self.seekBit(self.size, "endraw")
+        if end:
+            yield end
+
+    def createContentSize(self):
+        # The header stores the total cabinet size in bytes.
+        return self["filesize"].value * 8
+
diff --git a/lib/hachoir_parser/archive/gzip_parser.py b/lib/hachoir_parser/archive/gzip_parser.py
new file mode 100644
index 0000000000000000000000000000000000000000..c0820332c061691ea781500b01b5035abe6b35e3
--- /dev/null
+++ b/lib/hachoir_parser/archive/gzip_parser.py
@@ -0,0 +1,129 @@
+"""
+GZIP archive parser.
+
+Author: Victor Stinner
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (
+    UInt8, UInt16, UInt32, Enum, TimestampUnix32,
+    Bit, CString, SubFile,
+    NullBits, Bytes, RawBytes)
+from hachoir_core.text_handler import textHandler, hexadecimal, filesizeHandler
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_parser.common.deflate import Deflate
+
+class GzipParser(Parser):
+    # gzip (RFC 1952) parser: member header, deflate payload, CRC32+size
+    # footer.
+    endian = LITTLE_ENDIAN
+    PARSER_TAGS = {
+        "id": "gzip",
+        "category": "archive",
+        "file_ext": ("gz",),
+        "mime": (u"application/x-gzip",),
+        "min_size": 18*8,
+        #"magic": (('\x1F\x8B\x08', 0),),
+        "magic_regex": (
+            # (magic, compression=deflate, <flags>, <mtime>, )
+            ('\x1F\x8B\x08.{5}[\0\2\4\6][\x00-\x0D]', 0),
+        ),
+        "description": u"gzip archive",
+    }
+    # OS codes from the gzip header "os" byte.
+    os_name = {
+         0: u"FAT filesystem",
+         1: u"Amiga",
+         2: u"VMS (or OpenVMS)",
+         3: u"Unix",
+         4: u"VM/CMS",
+         5: u"Atari TOS",
+         6: u"HPFS filesystem (OS/2, NT)",
+         7: u"Macintosh",
+         8: u"Z-System",
+         9: u"CP/M",
+        10: u"TOPS-20",
+        11: u"NTFS filesystem (NT)",
+        12: u"QDOS",
+        13: u"Acorn RISCOS",
+    }
+    COMPRESSION_NAME = {
+        8: u"deflate",
+    }
+
+    def validate(self):
+        if self["signature"].value != '\x1F\x8B':
+            return "Invalid signature"
+        if self["compression"].value not in self.COMPRESSION_NAME:
+            return "Unknown compression method (%u)" % self["compression"].value
+        if self["reserved[0]"].value != 0:
+            return "Invalid reserved[0] value"
+        if self["reserved[1]"].value != 0:
+            return "Invalid reserved[1] value"
+        if self["reserved[2]"].value != 0:
+            return "Invalid reserved[2] value"
+        return True
+
+    def createFields(self):
+        # Gzip header
+        yield Bytes(self, "signature", 2, r"GZip file signature (\x1F\x8B)")
+        yield Enum(UInt8(self, "compression", "Compression method"), self.COMPRESSION_NAME)
+
+        # Flags
+        yield Bit(self, "is_text", "File content is probably ASCII text")
+        yield Bit(self, "has_crc16", "Header CRC16")
+        yield Bit(self, "has_extra", "Extra informations (variable size)")
+        yield Bit(self, "has_filename", "Contains filename?")
+        yield Bit(self, "has_comment", "Contains comment?")
+        yield NullBits(self, "reserved[]", 3)
+        yield TimestampUnix32(self, "mtime", "Modification time")
+
+        # Extra flags
+        yield NullBits(self, "reserved[]", 1)
+        yield Bit(self, "slowest", "Compressor used maximum compression (slowest)")
+        yield Bit(self, "fastest", "Compressor used the fastest compression")
+        yield NullBits(self, "reserved[]", 5)
+        yield Enum(UInt8(self, "os", "Operating system"), self.os_name)
+
+        # Optional fields
+        if self["has_extra"].value:
+            yield UInt16(self, "extra_length", "Extra length")
+            yield RawBytes(self, "extra", self["extra_length"].value, "Extra")
+        if self["has_filename"].value:
+            yield CString(self, "filename", "Filename", charset="ISO-8859-1")
+        if self["has_comment"].value:
+            yield CString(self, "comment", "Comment")
+        if self["has_crc16"].value:
+            yield textHandler(UInt16(self, "hdr_crc16", "CRC16 of the header"),
+                hexadecimal)
+
+        if self._size is None: # TODO: is it possible to handle piped input?
+            raise NotImplementedError()
+
+        # Read file
+        size = (self._size - self.current_size) // 8 - 8  # -8: crc32+size
+        if 0 < size:
+            # Prefer the embedded filename; otherwise derive it from a
+            # ".gz" stream filename tag, if any.
+            if self["has_filename"].value:
+                filename = self["filename"].value
+            else:
+                for tag, filename in self.stream.tags:
+                    if tag == "filename" and filename.endswith(".gz"):
+                        filename = filename[:-3]
+                        break
+                else:
+                    filename = None
+            yield Deflate(SubFile(self, "file", size, filename=filename))
+
+        # Footer
+        yield textHandler(UInt32(self, "crc32",
+            "Uncompressed data content CRC32"), hexadecimal)
+        yield filesizeHandler(UInt32(self, "size", "Uncompressed size"))
+
+    def createDescription(self):
+        desc = u"gzip archive"
+        info = []
+        if "filename" in self:
+            info.append('filename "%s"' % self["filename"].value)
+        if "size" in self:
+            info.append("was %s" % self["size"].display)
+        if self["mtime"].value:
+            info.append(self["mtime"].display)
+        return "%s: %s" % (desc, ", ".join(info))
+
diff --git a/lib/hachoir_parser/archive/mar.py b/lib/hachoir_parser/archive/mar.py
new file mode 100644
index 0000000000000000000000000000000000000000..6a7e31a768f55d80658ca1ea6dd46f5045b7846f
--- /dev/null
+++ b/lib/hachoir_parser/archive/mar.py
@@ -0,0 +1,67 @@
+"""
+Microsoft Archive parser
+
+Author: Victor Stinner
+Creation date: 2007-03-04
+"""
+
+# Sanity bound on the file-count field, used by validate().
+MAX_NB_FILE = 100000
+
+from hachoir_parser import Parser
+from hachoir_core.field import FieldSet, String, UInt32, SubFile
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.text_handler import textHandler, filesizeHandler, hexadecimal
+
+class FileIndex(FieldSet):
+    # Fixed-size (68 byte) index entry: name, size, CRC32 and data offset.
+    static_size = 68*8
+
+    def createFields(self):
+        yield String(self, "filename", 56, truncate="\0", charset="ASCII")
+        yield filesizeHandler(UInt32(self, "filesize"))
+        yield textHandler(UInt32(self, "crc32"), hexadecimal)
+        yield UInt32(self, "offset")
+
+    def createDescription(self):
+        return "File %s (%s) at %s" % (
+            self["filename"].value, self["filesize"].display, self["offset"].value)
+
+class MarFile(Parser):
+    MAGIC = "MARC"
+    PARSER_TAGS = {
+        "id": "mar",
+        "category": "archive",
+        "file_ext": ("mar",),
+        "min_size": 80*8,  # At least one file index
+        "magic": ((MAGIC, 0),),
+        "description": "Microsoft Archive",
+    }
+    endian = LITTLE_ENDIAN
+
+    def validate(self):
+        if self.stream.readBytes(0, 4) != self.MAGIC:
+            return "Invalid magic"
+        if self["version"].value != 3:
+            return "Invalid version"
+        if not(1 <= self["nb_file"].value <= MAX_NB_FILE):
+            return "Invalid number of file"
+        return True
+
+    def createFields(self):
+        yield String(self, "magic", 4, "File signature (MARC)", charset="ASCII")
+        yield UInt32(self, "version")
+        yield UInt32(self, "nb_file")
+        files = []
+        for index in xrange(self["nb_file"].value):
+            item = FileIndex(self, "file[]")
+            yield item
+            if item["filesize"].value:
+                files.append(item)
+        files.sort(key=lambda item: item["offset"].value)
+        for index in files:
+            padding = self.seekByte(index["offset"].value)
+            if padding:
+                yield padding
+            size = index["filesize"].value
+            desc = "File %s" % index["filename"].value
+            yield SubFile(self, "data[]", size, desc, filename=index["filename"].value)
+
diff --git a/lib/hachoir_parser/archive/rar.py b/lib/hachoir_parser/archive/rar.py
new file mode 100644
index 0000000000000000000000000000000000000000..2be5887c482028dda13aa33077f6ea2d235f905a
--- /dev/null
+++ b/lib/hachoir_parser/archive/rar.py
@@ -0,0 +1,353 @@
+"""
+RAR parser
+
+Status: can only read higher-level structures
+Author: Christophe Gisquet
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (StaticFieldSet, FieldSet,
+    Bit, Bits, Enum,
+    UInt8, UInt16, UInt32, UInt64,
+    String, TimeDateMSDOS32,
+    NullBytes, NullBits, RawBytes)
+from hachoir_core.text_handler import textHandler, filesizeHandler, hexadecimal
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_parser.common.msdos import MSDOSFileAttr32
+
# Upper bound (in bytes) used when searching for the archive-end marker.
MAX_FILESIZE = 1000 * 1024 * 1024

# Block type byte (0x72..0x7B) -> human readable block name.
BLOCK_NAME = {
    0x72: "Marker",
    0x73: "Archive",
    0x74: "File",
    0x75: "Comment",
    0x76: "Extra info",
    0x77: "Subblock",
    0x78: "Recovery record",
    0x79: "Archive authenticity",
    0x7A: "New-format subblock",
    0x7B: "Archive end",
}

# Packing-method byte (0x30..0x35) -> compression level name.
COMPRESSION_NAME = {
    0x30: "Storing",
    0x31: "Fastest compression",
    0x32: "Fast compression",
    0x33: "Normal compression",
    0x34: "Good compression",
    0x35: "Best compression"
}

OS_MSDOS = 0
OS_WIN32 = 2
# Host OS byte -> operating system name.
OS_NAME = {
    0: "MS DOS",
    1: "OS/2",
    2: "Win32",
    3: "Unix",
}

# Meaning of the 3-bit "dictionary_size" file flag (see FileFlags).
DICTIONARY_SIZE = {
    0: "Dictionary size 64 Kb",
    1: "Dictionary size 128 Kb",
    2: "Dictionary size 256 Kb",
    3: "Dictionary size 512 Kb",
    4: "Dictionary size 1024 Kb",
    7: "File is a directory",
}
+
def formatRARVersion(field):
    """Render the one-byte RAR version number as "major.minor"."""
    major, minor = divmod(field.value, 10)
    return "%u.%u" % (major, minor)
+
def commonFlags(s):
    """Yield the two flag bits shared by every RAR block type."""
    yield Bit(s, "has_added_size", "Additional field indicating additional size")
    yield Bit(s, "is_ignorable", "Old versions of RAR should ignore this block when copying data")
+
class ArchiveFlags(StaticFieldSet):
    """Flag bits of the archive (0x73) block, 16 bits total."""
    format = (
        (Bit, "vol", "Archive volume"),
        (Bit, "has_comment", "Whether there is a comment"),
        (Bit, "is_locked", "Archive volume"),
        (Bit, "is_solid", "Whether files can be extracted separately"),
        (Bit, "new_numbering", "New numbering, or compressed comment"), # From unrar
        (Bit, "has_authenticity_information", "The integrity/authenticity of the archive can be checked"),
        (Bit, "is_protected", "The integrity/authenticity of the archive can be checked"),
        (Bit, "is_passworded", "Needs a password to be decrypted"),
        (Bit, "is_first_vol", "Whether it is the first volume"),
        (Bit, "is_encrypted", "Whether the encryption version is present"),
        (NullBits, "internal", 6, "Reserved for 'internal use'")
    )
+
def archiveFlags(s):
    """Yield the flags field of an archive (0x73) block."""
    yield ArchiveFlags(s, "flags", "Archiver block flags")
+
def archiveHeader(s):
    """Yield the fixed reserved fields of the archive (0x73) block header."""
    yield NullBytes(s, "reserved[]", 2, "Reserved word")
    yield NullBytes(s, "reserved[]", 4, "Reserved dword")
+
def commentHeader(s):
    """Yield the header fields of a comment (0x75) block."""
    yield filesizeHandler(UInt16(s, "total_size", "Comment header size + comment size"))
    yield filesizeHandler(UInt16(s, "uncompressed_size", "Uncompressed comment size"))
    yield UInt8(s, "required_version", "RAR version needed to extract comment")
    yield UInt8(s, "packing_method", "Comment packing method")
    yield UInt16(s, "comment_crc16", "Comment CRC")
+
def commentBody(s):
    """Yield the compressed comment data, if any remains in the block."""
    # NOTE(review): "total_size" is a byte count while current_size is in
    # bits -- the units look mixed; confirm against the format spec.
    size = s["total_size"].value - s.current_size
    if size > 0:
        yield RawBytes(s, "comment_data", size, "Compressed comment data")
+
def signatureHeader(s):
    """Yield the header fields of a signature (0x79) block."""
    yield TimeDateMSDOS32(s, "creation_time")
    yield filesizeHandler(UInt16(s, "arc_name_size"))
    yield filesizeHandler(UInt16(s, "user_name_size"))
+
def recoveryHeader(s):
    """Yield the header fields of a recovery-record (0x78) block."""
    yield filesizeHandler(UInt32(s, "total_size"))
    yield textHandler(UInt8(s, "version"), hexadecimal)
    yield UInt16(s, "rec_sectors")
    yield UInt32(s, "total_blocks")
    yield RawBytes(s, "mark", 8)
+
def avInfoHeader(s):
    """Yield the header fields of an AV-info (0x76) block.

    Fix: these fields passed an obsolete ``handler=`` keyword that the
    current field constructors no longer accept; wrap with textHandler()
    like every other hexadecimal field in this module.
    """
    yield filesizeHandler(UInt16(s, "total_size", "Total block size"))
    yield textHandler(UInt8(s, "version", "Version needed to decompress"), hexadecimal)
    yield textHandler(UInt8(s, "method", "Compression method"), hexadecimal)
    yield textHandler(UInt8(s, "av_version", "Version for AV"), hexadecimal)
    yield textHandler(UInt32(s, "av_crc", "AV info CRC32"), hexadecimal)
+
def avInfoBody(s):
    """Yield the remaining AV info payload, if any."""
    # NOTE(review): "total_size" is a byte count while current_size is in
    # bits -- the units look mixed; confirm against the format spec.
    size = s["total_size"].value - s.current_size
    if size > 0:
        yield RawBytes(s, "av_info_data", size, "AV info")
+
class FileFlags(FieldSet):
    """Flag bits of a file (0x74) block, 16 bits total.

    Bit order matters: the fields are read LSB-first from the stream.
    """
    static_size = 16
    def createFields(self):
        yield Bit(self, "continued_from", "File continued from previous volume")
        yield Bit(self, "continued_in", "File continued in next volume")
        yield Bit(self, "is_encrypted", "File encrypted with password")
        yield Bit(self, "has_comment", "File comment present")
        yield Bit(self, "is_solid", "Information from previous files is used (solid flag)")
        # The 3 following lines are what blocks more staticity
        yield Enum(Bits(self, "dictionary_size", 3, "Dictionary size"), DICTIONARY_SIZE)
        # The two flags shared with every other block type
        for bit in commonFlags(self):
            yield bit
        yield Bit(self, "is_large", "file64 operations needed")
        yield Bit(self, "is_unicode", "Filename also encoded using Unicode")
        yield Bit(self, "has_salt", "Has salt for encryption")
        yield Bit(self, "uses_file_version", "File versioning is used")
        yield Bit(self, "has_ext_time", "Extra time ??")
        yield Bit(self, "has_ext_flags", "Extra flag ??")
+
def fileFlags(s):
    """Yield the flags field of a file (0x74) block."""
    yield FileFlags(s, "flags", "File block flags")
+
class ExtTime(FieldSet):
    """Extended time information: a 16-bit flags word followed by optional
    DOS timestamps and sub-second remainder bytes, one flag nibble per
    timestamp."""
    def createFields(self):
        yield textHandler(UInt16(self, "time_flags", "Flags for extended time"), hexadecimal)
        flags = self["time_flags"].value
        for index in xrange(4):
            # One 4-bit flag group per timestamp, highest nibble first
            rmode = flags >> ((3-index)*4)
            if rmode & 8:
                if index:
                    # NOTE(review): no DOS time is read for index 0 --
                    # presumably it was already read by the enclosing
                    # header; confirm against the RAR format spec.
                    yield TimeDateMSDOS32(self, "dos_time[]", "DOS Time")
                if rmode & 3:
                    # Low 2 bits give the number of extra precision bytes
                    yield RawBytes(self, "remainder[]", rmode & 3, "Time remainder")
+
def specialHeader(s, is_file):
    """Common header of file (0x74) and new-format subblock (0x7A) blocks.

    When ``is_file`` is true, also yield the file-only trailing fields
    (salt, extended time).
    """
    yield filesizeHandler(UInt32(s, "compressed_size", "Compressed size (bytes)"))
    yield filesizeHandler(UInt32(s, "uncompressed_size", "Uncompressed size (bytes)"))
    yield Enum(UInt8(s, "host_os", "Operating system used for archiving"), OS_NAME)
    yield textHandler(UInt32(s, "crc32", "File CRC32"), hexadecimal)
    yield TimeDateMSDOS32(s, "ftime", "Date and time (MS DOS format)")
    yield textHandler(UInt8(s, "version", "RAR version needed to extract file"), formatRARVersion)
    yield Enum(UInt8(s, "method", "Packing method"), COMPRESSION_NAME)
    yield filesizeHandler(UInt16(s, "filename_length", "File name size"))
    # File attributes are OS-dependent: decoded for DOS/Win32, raw otherwise
    if s["host_os"].value in (OS_MSDOS, OS_WIN32):
        yield MSDOSFileAttr32(s, "file_attr", "File attributes")
    else:
        yield textHandler(UInt32(s, "file_attr", "File attributes"), hexadecimal)

    # Start additional field from unrar
    if s["flags/is_large"].value:
        yield filesizeHandler(UInt64(s, "large_size", "Extended 64bits filesize"))

    # End additional field
    size = s["filename_length"].value
    if size > 0:
        if s["flags/is_unicode"].value:
            charset = "UTF-8"
        else:
            charset = "ISO-8859-15"
        yield String(s, "filename", size, "Filename", charset=charset)
    # Start additional fields from unrar - file only
    if is_file:
        if s["flags/has_salt"].value:
            # NOTE(review): unrar uses an 8-byte salt; a single UInt8 looks
            # short -- confirm against the RAR format spec.
            yield textHandler(UInt8(s, "salt", "Salt"), hexadecimal)
        if s["flags/has_ext_time"].value:
            yield ExtTime(s, "extra_time", "Extra time info")
+
def fileHeader(s):
    """Header of a file (0x74) block: the common header with file extras."""
    return specialHeader(s, is_file=True)
+
def fileBody(s):
    """Yield the compressed payload that follows a file header."""
    total = s["compressed_size"].value
    if s["flags/is_large"].value:
        # 64-bit files store the upper part of the size separately
        total += s["large_size"].value
    if total > 0:
        yield RawBytes(s, "compressed_data", total, "File compressed data")
+
def fileDescription(s):
    """One-line summary of a file block: name plus compressed size."""
    name = s["filename"].display
    size = s["compressed_size"].display
    return "File entry: %s (%s)" % (name, size)
+
def newSubHeader(s):
    """Header of a new-format subblock (0x7A): the common header only."""
    return specialHeader(s, is_file=False)
+
class EndFlags(StaticFieldSet):
    """Flag bits of the archive-end (0x7B) block, 16 bits total."""
    format = (
        (Bit, "has_next_vol", "Whether there is another next volume"),
        (Bit, "has_data_crc", "Whether a CRC value is present"),
        (Bit, "rev_space"),
        (Bit, "has_vol_number", "Whether the volume number is present"),
        (Bits, "unused[]", 4),
        (Bit, "has_added_size", "Additional field indicating additional size"),
        (Bit, "is_ignorable", "Old versions of RAR should ignore this block when copying data"),
        (Bits, "unused[]", 6),
    )
+
def endFlags(s):
    """Yield the flags field of an archive-end (0x7B) block."""
    yield EndFlags(s, "flags", "End block flags")
+
class BlockFlags(FieldSet):
    """Generic 16-bit flag word, used by block types with no specific flags."""
    static_size = 16

    def createFields(self):
        yield textHandler(Bits(self, "unused[]", 8, "Unused flag bits"), hexadecimal)
        yield Bit(self, "has_added_size", "Additional field indicating additional size")
        yield Bit(self, "is_ignorable", "Old versions of RAR should ignore this block when copying data")
        yield Bits(self, "unused[]", 6)
+
class Block(FieldSet):
    """
    One RAR block: CRC16, type byte, flags, block size, then a
    type-specific header and body. The type byte selects the parsing
    routines (and block name/description) through BLOCK_INFO.
    """
    BLOCK_INFO = {
        # None means 'use default function'
        0x72: ("marker", "Archive header", None, None, None),
        0x73: ("archive_start", "Archive info", archiveFlags, archiveHeader, None),
        0x74: ("file[]", fileDescription, fileFlags, fileHeader, fileBody),
        0x75: ("comment[]", "Stray comment", None, commentHeader, commentBody),
        0x76: ("av_info[]", "Extra information", None, avInfoHeader, avInfoBody),
        0x77: ("sub_block[]", "Stray subblock", None, newSubHeader, fileBody),
        0x78: ("recovery[]", "Recovery block", None, recoveryHeader, None),
        0x79: ("signature", "Signature block", None, signatureHeader, None),
        0x7A: ("new_sub_block[]", "Stray new-format subblock", fileFlags,
               newSubHeader, fileBody),
        0x7B: ("archive_end", "Archive end block", endFlags, None, None),
    }

    def __init__(self, parent, name):
        FieldSet.__init__(self, parent, name)
        t = self["block_type"].value
        if t in self.BLOCK_INFO:
            self._name, desc, parseFlags, parseHeader, parseBody = self.BLOCK_INFO[t]
            if callable(desc):
                self.createDescription = lambda: desc(self)
            elif desc:
                self._description = desc
            # Override the default parse methods with type-specific ones
            if parseFlags    : self.parseFlags     = lambda: parseFlags(self)
            if parseHeader   : self.parseHeader    = lambda: parseHeader(self)
            if parseBody     : self.parseBody      = lambda: parseBody(self)
        else:
            # Fix: the message formatted the *builtin* ``type`` with "%u"
            # (a TypeError at runtime); use the block type value ``t``.
            self.info("Processing as unknown block of type %u" % t)

        # Total block size: declared header size plus, for file-like
        # blocks, the compressed payload (and its 64-bit extension).
        self._size = 8*self["block_size"].value
        if t == 0x74 or t == 0x7A:
            self._size += 8*self["compressed_size"].value
            if "is_large" in self["flags"] and self["flags/is_large"].value:
                self._size += 8*self["large_size"].value
        elif "has_added_size" in self:
            self._size += 8*self["added_size"].value
        # TODO: check if any other member is needed here

    def createFields(self):
        yield textHandler(UInt16(self, "crc16", "Block CRC16"), hexadecimal)
        yield textHandler(UInt8(self, "block_type", "Block type"), hexadecimal)

        # Parse flags
        for field in self.parseFlags():
            yield field

        # Get block size
        yield filesizeHandler(UInt16(self, "block_size", "Block size"))

        # Parse remaining header
        for field in self.parseHeader():
            yield field

        # Finish header with stuff of unknown size
        size = self["block_size"].value - (self.current_size//8)
        if size > 0:
            yield RawBytes(self, "unknown", size, "Unknow data (UInt32 probably)")

        # Parse body
        for field in self.parseBody():
            yield field

    def createDescription(self):
        # Fix: the type field is named "block_type"; there is no "type"
        # field, so the old lookup raised a missing-field error.
        return "Block entry: %s" % self["block_type"].display

    def parseFlags(self):
        """Default flags parser, used when BLOCK_INFO supplies none."""
        yield BlockFlags(self, "flags", "Block header flags")

    def parseHeader(self):
        """Default header parser: only the optional 32-bit added size."""
        if "has_added_size" in self["flags"] and \
           self["flags/has_added_size"].value:
            yield filesizeHandler(UInt32(self, "added_size",
                "Supplementary block size"))

    def parseBody(self):
        """
        Parse what is left of the block
        """
        size = self["block_size"].value - (self.current_size//8)
        if "has_added_size" in self["flags"] and self["flags/has_added_size"].value:
            size += self["added_size"].value
        if size > 0:
            yield RawBytes(self, "body", size, "Body data")
+
class RarFile(Parser):
    """Top-level parser for Roshal ARchive (RAR) files: a flat sequence of
    typed blocks starting with the marker block."""
    MAGIC = "Rar!\x1A\x07\x00"
    PARSER_TAGS = {
        "id": "rar",
        "category": "archive",
        "file_ext": ("rar",),
        "mime": (u"application/x-rar-compressed", ),
        "min_size": 7*8,
        "magic": ((MAGIC, 0),),
        "description": "Roshal archive (RAR)",
    }
    endian = LITTLE_ENDIAN

    def validate(self):
        """Accept the stream only if it starts with the RAR marker bytes."""
        if self.stream.readBytes(0, len(self.MAGIC)) != self.MAGIC:
            return "Invalid magic"
        return True

    def createFields(self):
        """Parse blocks until the end of the stream."""
        while not self.eof:
            yield Block(self, "block[]")

    def createContentSize(self):
        """Bound the content by locating the archive-end block signature."""
        pos = self.stream.searchBytes("\xC4\x3D\x7B\x00\x40\x07\x00",
            0, MAX_FILESIZE * 8)
        if pos is None:
            return None
        return pos + 7*8
+
diff --git a/lib/hachoir_parser/archive/rpm.py b/lib/hachoir_parser/archive/rpm.py
new file mode 100644
index 0000000000000000000000000000000000000000..ccb8d2e58f7096a083e510d1cdbe8a64a48a2c6c
--- /dev/null
+++ b/lib/hachoir_parser/archive/rpm.py
@@ -0,0 +1,267 @@
+"""
+RPM archive parser.
+
+Author: Victor Stinner, 1st December 2005.
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, ParserError,
+    UInt8, UInt16, UInt32, UInt64, Enum,
+    NullBytes, Bytes, RawBytes, SubFile,
+    Character, CString, String)
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_parser.archive.gzip_parser import GzipParser
+from hachoir_parser.archive.bzip2_parser import Bzip2Parser
+
class ItemContent(FieldSet):
    """Value(s) of one RPM header item, decoded according to the type and
    count recorded in its companion index entry."""
    # Item "type" value -> field class used to read the value(s).
    format_type = {
        0: UInt8,
        1: Character,
        2: UInt8,
        3: UInt16,
        4: UInt32,
        5: UInt64,
        6: CString,
        7: RawBytes,
        8: CString,
        9: CString
    }

    def __init__(self, parent, name, item):
        # ``item`` is the index entry (Item) describing this value.
        FieldSet.__init__(self, parent, name, item.description)
        self.related_item = item
        self._name = "content_%s" % item.name

    def createFields(self):
        item = self.related_item
        type = item["type"].value

        cls = self.format_type[type]
        count = item["count"].value
        if cls is RawBytes: # or type == 8:
            if cls is RawBytes:
                # NOTE(review): this inner test is always true, so the
                # CString branch below is dead code -- confirm intent.
                args = (self, "value", count)
            else:
                args = (self, "value") # cls is CString
            count = 1
        else:
            if 1 < count:
                args = (self, "value[]")
            else:
                args = (self, "value")
        for index in xrange(count):
            yield cls(*args)
+
class Item(FieldSet):
    """One entry of an RPM index table: tag, type, offset and count.

    The default tag dictionary covers the signature section; subclasses
    may pass their own (see ItemHeader).
    """
    # Item "type" value -> symbolic type name.
    type_name = {
        0: "NULL",
        1: "CHAR",
        2: "INT8",
        3: "INT16",
        4: "INT32",
        5: "INT64",
        6: "CSTRING",
        7: "BIN",
        8: "CSTRING_ARRAY",
        9: "CSTRING?"
    }
    # Tag id -> name, for the signature section.
    tag_name = {
        1000: "File size",
        1001: "(Broken) MD5 signature",
        1002: "PGP 2.6.3 signature",
        1003: "(Broken) MD5 signature",
        1004: "MD5 signature",
        1005: "GnuPG signature",
        1006: "PGP5 signature",
        1007: "Uncompressed payload size (bytes)",
        256+8: "Broken SHA1 header digest",
        256+9: "Broken SHA1 header digest",
        256+13: "Broken SHA1 header digest",
        256+11: "DSA header signature",
        256+12: "RSA header signature"
    }

    def __init__(self, parent, name, description=None, tag_name_dict=None):
        FieldSet.__init__(self, parent, name, description)
        if tag_name_dict is None:
            tag_name_dict = Item.tag_name
        self.tag_name_dict = tag_name_dict

    def createFields(self):
        yield Enum(UInt32(self, "tag", "Tag"), self.tag_name_dict)
        yield Enum(UInt32(self, "type", "Type"), Item.type_name)
        yield UInt32(self, "offset", "Offset")
        yield UInt32(self, "count", "Count")

    def createDescription(self):
        return "Item: %s (%s)" % (self["tag"].display, self["type"].display)
+
class ItemHeader(Item):
    """Index entry of the RPM header section: same layout as Item, but with
    the header section's tag dictionary."""
    # Tag id -> name, for the header section.
    tag_name = {
        61: "Current image",
        62: "Signatures",
        63: "Immutable",
        64: "Regions",
        100: "I18N string locales",
        1000: "Name",
        1001: "Version",
        1002: "Release",
        1003: "Epoch",
        1004: "Summary",
        1005: "Description",
        1006: "Build time",
        1007: "Build host",
        1008: "Install time",
        1009: "Size",
        1010: "Distribution",
        1011: "Vendor",
        1012: "Gif",
        1013: "Xpm",
        1014: "Licence",
        1015: "Packager",
        1016: "Group",
        1017: "Changelog",
        1018: "Source",
        1019: "Patch",
        1020: "Url",
        1021: "OS",
        1022: "Arch",
        1023: "Prein",
        1024: "Postin",
        1025: "Preun",
        1026: "Postun",
        1027: "Old filenames",
        1028: "File sizes",
        1029: "File states",
        1030: "File modes",
        1031: "File uids",
        1032: "File gids",
        1033: "File rdevs",
        1034: "File mtimes",
        1035: "File MD5s",
        1036: "File link to's",
        1037: "File flags",
        1038: "Root",
        1039: "File username",
        1040: "File groupname",
        1043: "Icon",
        1044: "Source rpm",
        1045: "File verify flags",
        1046: "Archive size",
        1047: "Provide name",
        1048: "Require flags",
        1049: "Require name",
        1050: "Require version",
        1051: "No source",
        1052: "No patch",
        1053: "Conflict flags",
        1054: "Conflict name",
        1055: "Conflict version",
        1056: "Default prefix",
        1057: "Build root",
        1058: "Install prefix",
        1059: "Exclude arch",
        1060: "Exclude OS",
        1061: "Exclusive arch",
        1062: "Exclusive OS",
        1064: "RPM version",
        1065: "Trigger scripts",
        1066: "Trigger name",
        1067: "Trigger version",
        1068: "Trigger flags",
        1069: "Trigger index",
        1079: "Verify script",
        #TODO: Finish the list (id 1070..1162 using rpm library source code)
    }

    def __init__(self, parent, name, description=None):
        Item.__init__(self, parent, name, description, self.tag_name)
+
def sortRpmItem(a, b):
    """cmp-style comparator ordering RPM items by their "offset" field."""
    diff = a["offset"].value - b["offset"].value
    return int(diff)
+
class PropertySet(FieldSet):
    """One RPM property section: a 3-byte signature, an index table, then
    the item values (read in ascending offset order) padded to the
    declared data size."""
    def __init__(self, parent, name, *args):
        FieldSet.__init__(self, parent, name, *args)
        # Section size: address of the second content item plus the
        # declared data size (converted to bits).
        self._size = self["content_item[1]"].address + self["size"].value * 8

    def createFields(self):
        # Read chunk header
        yield Bytes(self, "signature", 3, r"Property signature (\x8E\xAD\xE8)")
        if self["signature"].value != "\x8E\xAD\xE8":
            raise ParserError("Invalid property signature")
        yield UInt8(self, "version", "Signature version")
        yield NullBytes(self, "reserved", 4, "Reserved")
        yield UInt32(self, "count", "Count")
        yield UInt32(self, "size", "Size")

        # Read item header
        items = []
        for i in range(0, self["count"].value):
            item = ItemHeader(self, "item[]")
            yield item
            items.append(item)

        # Sort items by their offset
        # NOTE(review): cmp-style sort argument -- Python 2 only.
        items.sort( sortRpmItem )

        # Read item content
        start = self.current_size/8
        for item in items:
            offset = item["offset"].value
            # Pad up to the item's declared offset (relative to data start)
            diff = offset - (self.current_size/8 - start)
            if 0 < diff:
                yield NullBytes(self, "padding[]", diff)
            yield ItemContent(self, "content[]", item)
        # Trailing padding up to the declared data size
        size = start + self["size"].value - self.current_size/8
        if 0 < size:
            yield NullBytes(self, "padding[]", size)
+
class RpmFile(Parser):
    """Parser for RPM packages: lead header, signature property set, header
    property set, then a gzip- or bzip2-compressed payload."""
    PARSER_TAGS = {
        "id": "rpm",
        "category": "archive",
        "file_ext": ("rpm",),
        "mime": (u"application/x-rpm",),
        "min_size": (96 + 16 + 16)*8, # file header + checksum + content header
        "magic": (('\xED\xAB\xEE\xDB', 0),),
        "description": "RPM package"
    }
    # RPM "type" word -> package kind.
    TYPE_NAME = {
        0: "Binary",
        1: "Source"
    }
    endian = BIG_ENDIAN

    def validate(self):
        """Check magic, major version and package type; True or an error."""
        if self["signature"].value != '\xED\xAB\xEE\xDB':
            return "Invalid signature"
        if self["major_ver"].value != 3:
            return "Unknown major version (%u)" % self["major_ver"].value
        if self["type"].value not in self.TYPE_NAME:
            return "Invalid RPM type"
        return True

    def createFields(self):
        yield Bytes(self, "signature", 4, r"RPM file signature (\xED\xAB\xEE\xDB)")
        yield UInt8(self, "major_ver", "Major version")
        yield UInt8(self, "minor_ver", "Minor version")
        yield Enum(UInt16(self, "type", "RPM type"), RpmFile.TYPE_NAME)
        yield UInt16(self, "architecture", "Architecture")
        yield String(self, "name", 66, "Archive name", strip="\0", charset="ASCII")
        yield UInt16(self, "os", "OS")
        yield UInt16(self, "signature_type", "Type of signature")
        yield NullBytes(self, "reserved", 16, "Reserved")
        yield PropertySet(self, "checksum", "Checksum (signature)")
        yield PropertySet(self, "header", "Header")

        if self._size is None: # TODO: is it possible to handle piped input?
            raise NotImplementedError

        # The remaining bytes are the compressed payload; sniff bzip2 by
        # its "BZh" magic, otherwise assume gzip.
        size = (self._size - self.current_size) // 8
        if size:
            if 3 <= size and self.stream.readBytes(self.current_size, 3) == "BZh":
                yield SubFile(self, "content", size, "bzip2 content", parser=Bzip2Parser)
            else:
                yield SubFile(self, "content", size, "gzip content", parser=GzipParser)
+
diff --git a/lib/hachoir_parser/archive/sevenzip.py b/lib/hachoir_parser/archive/sevenzip.py
new file mode 100644
index 0000000000000000000000000000000000000000..7a0148f5f86b13c63c27591322a11236acd91377
--- /dev/null
+++ b/lib/hachoir_parser/archive/sevenzip.py
@@ -0,0 +1,401 @@
+"""
+7zip file parser
+
+Information:
+- File 7zformat.txt of 7-zip SDK:
+  http://www.7-zip.org/sdk.html
+
+Author: Olivier SCHWAB
+Creation date: 6 december 2006
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (Field, FieldSet, ParserError,
+    GenericVector,
+    Enum, UInt8, UInt32, UInt64,
+    Bytes, RawBytes)
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal, filesizeHandler
+
class SZUInt64(Field):
    """
    Variable length UInt64, where the first byte gives both the number of bytes
    needed and the upper byte value.
    """
    def __init__(self, parent, name, max_size=None, description=None):
        Field.__init__(self, parent, name, size=8, description=description)
        value = 0
        addr = self.absolute_address
        mask = 0x80
        firstByte = parent.stream.readBits(addr, 8, LITTLE_ENDIAN)
        for i in xrange(8):
            addr += 8
            if not (firstByte & mask):
                # Remaining low bits of the first byte are the top of the value
                value += ((firstByte & (mask-1)) << (8*i))
                break
            # Each leading set bit of the first byte adds one extra data byte
            value |= (parent.stream.readBits(addr, 8, LITTLE_ENDIAN) << (8*i))
            mask >>= 1
            self._size += 8
        self.createValue = lambda: value
+
# Property IDs used in the 7z end header structure (see 7zformat.txt).
ID_END, ID_HEADER, ID_ARCHIVE_PROPS, ID_ADD_STREAM_INFO, ID_MAIN_STREAM_INFO, \
ID_FILES_INFO, ID_PACK_INFO, ID_UNPACK_INFO, ID_SUBSTREAMS_INFO, ID_SIZE, \
ID_CRC, ID_FOLDER, ID_CODERS_UNPACK_SIZE, ID_NUM_UNPACK_STREAMS, \
ID_EMPTY_STREAM, ID_EMPTY_FILE, ID_ANTI, ID_NAME, ID_CREATION_TIME, \
ID_LAST_ACCESS_TIME, ID_LAST_WRITE_TIME, ID_WIN_ATTR, ID_COMMENT, \
ID_ENCODED_HEADER = xrange(24)

# Property ID -> human readable name.
ID_INFO = {
    ID_END               : "End",
    ID_HEADER            : "Header embedding another one",
    ID_ARCHIVE_PROPS     : "Archive Properties",
    ID_ADD_STREAM_INFO   : "Additional Streams Info",
    ID_MAIN_STREAM_INFO  : "Main Streams Info",
    ID_FILES_INFO        : "Files Info",
    ID_PACK_INFO         : "Pack Info",
    ID_UNPACK_INFO       : "Unpack Info",
    ID_SUBSTREAMS_INFO   : "Substreams Info",
    ID_SIZE              : "Size",
    ID_CRC               : "CRC",
    ID_FOLDER            : "Folder",
    ID_CODERS_UNPACK_SIZE: "Coders Unpacked size",
    ID_NUM_UNPACK_STREAMS: "Number of Unpacked Streams",
    ID_EMPTY_STREAM      : "Empty Stream",
    ID_EMPTY_FILE        : "Empty File",
    ID_ANTI              : "Anti",
    ID_NAME              : "Name",
    ID_CREATION_TIME     : "Creation Time",
    ID_LAST_ACCESS_TIME  : "Last Access Time",
    ID_LAST_WRITE_TIME   : "Last Write Time",
    ID_WIN_ATTR          : "Win Attributes",
    ID_COMMENT           : "Comment",
    ID_ENCODED_HEADER    : "Header holding encoded data info",
}
+
class SkippedData(FieldSet):
    """A property we do not parse: its ID byte, a size, and the raw payload."""
    def createFields(self):
        yield Enum(UInt8(self, "id[]"), ID_INFO)
        size = SZUInt64(self, "size")
        yield size
        if size.value > 0:
            yield RawBytes(self, "data", size.value)
+
def waitForID(s, wait_id, wait_name="waited_id[]"):
    """Yield SkippedData fields until the ``wait_id`` property byte is
    reached, then yield that ID under ``wait_name`` and return."""
    while not s.eof:
        # Peek at the next byte without consuming it
        addr = s.absolute_address+s.current_size
        uid = s.stream.readBits(addr, 8, LITTLE_ENDIAN)
        if uid == wait_id:
            yield Enum(UInt8(s, wait_name), ID_INFO)
            s.info("Found ID %s (%u)" % (ID_INFO[uid], uid))
            return
        s.info("Skipping ID %u!=%u" % (uid, wait_id))
        yield SkippedData(s, "skipped_id[]", "%u != %u" % (uid, wait_id))
+
class HashDigest(FieldSet):
    """CRC digests: a "defined" boolean vector followed by one UInt32 hash
    per defined entry."""
    def __init__(self, parent, name, num_digests, desc=None):
        FieldSet.__init__(self, parent, name, desc)
        # Number of entries in the defined/hash vectors
        self.num_digests = num_digests
    def createFields(self):
        yield Enum(UInt8(self, "id"), ID_INFO)
        # NOTE(review): this peek starts at the field-set start, so it
        # includes the ID byte just read rather than the defined flags --
        # confirm the intended offset.
        bytes = self.stream.readBytes(self.absolute_address, self.num_digests)
        if self.num_digests > 0:
            yield GenericVector(self, "defined[]", self.num_digests, UInt8, "bool")
            for index in xrange(self.num_digests):
                if bytes[index]:
                    yield textHandler(UInt32(self, "hash[]",
                        "Hash for digest %u" % index), hexadecimal)
+
class PackInfo(FieldSet):
    """Pack Info property: position and sizes of the packed streams,
    optionally followed by CRC digests."""
    def createFields(self):
        yield Enum(UInt8(self, "id"), ID_INFO)
        # Very important, helps determine where the data is
        yield SZUInt64(self, "pack_pos", "Position of the packs")
        num = SZUInt64(self, "num_pack_streams")
        yield num
        num = num.value

        for field in waitForID(self, ID_SIZE, "size_marker"):
            yield field

        for size in xrange(num):
            yield SZUInt64(self, "pack_size[]")

        while not self.eof:
            # Peek at the next property ID
            addr = self.absolute_address+self.current_size
            uid = self.stream.readBits(addr, 8, LITTLE_ENDIAN)
            if uid == ID_END:
                yield Enum(UInt8(self, "end_marker"), ID_INFO)
                break
            elif uid == ID_CRC:
                # NOTE(review): ``size`` is the leftover index of the
                # pack_size loop above (num-1), not a digest count --
                # looks wrong; confirm against the 7z format spec.
                yield HashDigest(self, "hash_digest", size)
            else:
                yield SkippedData(self, "skipped_data")
+
def lzmaParams(value):
    """Decode the one-byte LZMA properties field.

    The byte encodes ``(pb * 5 + lp) * 9 + lc``; returns a display string
    "lc=.. pb=.. lp=..". Fix: the original format string paired the ``pb``
    label with the lp value and vice versa; divmod also keeps integer
    (floor) division explicit.
    """
    param = value.value
    # Literal coder context bits, then split the remainder into pb/lp
    remainder, lc = divmod(param, 9)
    # Position state bits / literal coder position bits
    pb, lp = divmod(remainder, 5)
    return "lc=%u pb=%u lp=%u" % (lc, pb, lp)
+
class CoderID(FieldSet):
    """Codec identifier of one coder: an ID byte (whose low nibble is the
    codec-name length, bit 0x10 flags stream counts, bit 0x20 flags a
    properties blob), then the optional fields it announces."""
    # Known codec ID byte strings -> name.
    CODECS = {
        # Only 2 methods ... and what about PPMD ?
        "\0"    : "copy",
        "\3\1\1": "lzma",
    }
    def createFields(self):
        byte = UInt8(self, "id_size")
        yield byte
        byte = byte.value
        self.info("ID=%u" % byte)
        size = byte & 0xF
        if size > 0:
            # Peek at the codec ID bytes to pick a field name
            name = self.stream.readBytes(self.absolute_address+self.current_size, size)
            if name in self.CODECS:
                name = self.CODECS[name]
                self.info("Codec is %s" % name)
            else:
                self.info("Undetermined codec %s" % name)
                name = "unknown"
            yield RawBytes(self, name, size)
            #yield textHandler(Bytes(self, "id", size), lambda: name)
        if byte & 0x10:
            yield SZUInt64(self, "num_stream_in")
            yield SZUInt64(self, "num_stream_out")
            self.info("Streams: IN=%u    OUT=%u" % \
                      (self["num_stream_in"].value, self["num_stream_out"].value))
        if byte & 0x20:
            size = SZUInt64(self, "properties_size[]")
            yield size
            if size.value == 5:
                #LzmaDecodeProperties@LZMAStateDecode.c
                yield textHandler(UInt8(self, "parameters"), lzmaParams)
                yield filesizeHandler(UInt32(self, "dictionary_size"))
            elif size.value > 0:
                yield RawBytes(self, "properties[]", size.value)
+
class CoderInfo(FieldSet):
    """One coder of a folder: its CoderID plus the stream counts it may
    declare (defaulting to one in, one out)."""
    def __init__(self, parent, name, desc=None):
        FieldSet.__init__(self, parent, name, desc)
        # Defaults used when the ID byte carries no stream counts
        self.in_streams = 1
        self.out_streams = 1
    def createFields(self):
        # The real ID
        addr = self.absolute_address + self.current_size
        b = self.parent.stream.readBits(addr, 8, LITTLE_ENDIAN)
        cid = CoderID(self, "coder_id")
        yield cid
        if b&0x10: # Work repeated, ...
            self.in_streams = cid["num_stream_in"].value
            self.out_streams = cid["num_stream_out"].value

        # Skip other IDs
        while b&0x80:
            addr = self.absolute_address + self.current_size
            b = self.parent.stream.readBits(addr, 8, LITTLE_ENDIAN)
            yield CoderID(self, "unused_codec_id[]")
+
class BindPairInfo(FieldSet):
    """Bind pair: connects one coder's output stream to another's input."""
    def createFields(self):
        # 64 bits values then cast to 32 in fact
        yield SZUInt64(self, "in_index")
        yield SZUInt64(self, "out_index")
        self.info("Indexes: IN=%u   OUT=%u" % \
                  (self["in_index"].value, self["out_index"].value))
+
class FolderItem(FieldSet):
    """One folder: its coders, the bind pairs connecting them, and the
    indexes of the packed streams feeding them."""
    def __init__(self, parent, name, desc=None):
        FieldSet.__init__(self, parent, name, desc)
        # Totals accumulated from the coders read in createFields()
        self.in_streams = 0
        self.out_streams = 0

    def createFields(self):
        yield SZUInt64(self, "num_coders")
        num = self["num_coders"].value
        self.info("Folder: %u codecs" % num)

        # Coders info
        for index in xrange(num):
            ci = CoderInfo(self, "coder_info[]")
            yield ci
            self.in_streams += ci.in_streams
            self.out_streams += ci.out_streams

        # Bin pairs
        self.info("out streams: %u" % self.out_streams)
        for index in xrange(self.out_streams-1):
            yield BindPairInfo(self, "bind_pair[]")

        # Packed streams
        # @todo: Actually find mapping
        packed_streams = self.in_streams - self.out_streams + 1
        if packed_streams == 1:
            # Single packed stream: its index is implicit
            pass
        else:
            for index in xrange(packed_streams):
                yield SZUInt64(self, "pack_stream[]")
+
+
class UnpackInfo(FieldSet):
    """7z unpack info: folder list, per-coder unpacked sizes and digests."""
    def createFields(self):
        yield Enum(UInt8(self, "id"), ID_INFO)
        # Wait for synch: skip bytes until the folder marker ID.
        for field in waitForID(self, ID_FOLDER, "folder_marker"):
            yield field
        yield SZUInt64(self, "num_folders")

        # Get generic info
        num = self["num_folders"].value
        self.info("%u folders" % num)
        yield UInt8(self, "is_external")

        # Read folder items
        for folder_index in xrange(num):
            yield FolderItem(self, "folder_item[]")

        # Get unpack sizes for each coder of each folder
        for field in waitForID(self, ID_CODERS_UNPACK_SIZE, "coders_unpsize_marker"):
            yield field
        for folder_index in xrange(num):
            folder_item = self["folder_item[%u]" % folder_index]
            # One unpacked size per output stream of the folder.
            for index in xrange(folder_item.out_streams):
                #yield UInt8(self, "unpack_size[]")
                yield SZUInt64(self, "unpack_size[]")

        # Extract digests: read ID-tagged blocks until ID_END, collecting
        # CRC digests and skipping any unknown blocks.
        while not self.eof:
            addr = self.absolute_address+self.current_size
            uid = self.stream.readBits(addr, 8, LITTLE_ENDIAN)
            if uid == ID_END:
                yield Enum(UInt8(self, "end_marker"), ID_INFO)
                break
            elif uid == ID_CRC:
                yield HashDigest(self, "hash_digest", num)
            else:
                yield SkippedData(self, "skip_data")
+
class SubStreamInfo(FieldSet):
    """Substreams information block (parsing not implemented yet)."""
    def createFields(self):
        yield Enum(UInt8(self, "id"), ID_INFO)
        raise ParserError("SubStreamInfo not implemented yet")
+
class EncodedHeader(FieldSet):
    """Compressed-header variant: a sequence of ID-tagged property blocks
    (pack info, unpack info, substreams info) terminated by ID_END."""
    def createFields(self):
        yield Enum(UInt8(self, "id"), ID_INFO)
        while not self.eof:
            # Peek the next property ID without consuming it.
            addr = self.absolute_address+self.current_size
            uid = self.stream.readBits(addr, 8, LITTLE_ENDIAN)
            if uid == ID_END:
                yield Enum(UInt8(self, "end_marker"), ID_INFO)
                break
            elif uid == ID_PACK_INFO:
                yield PackInfo(self, "pack_info", ID_INFO[ID_PACK_INFO])
            elif uid == ID_UNPACK_INFO:
                yield UnpackInfo(self, "unpack_info", ID_INFO[ID_UNPACK_INFO])
            elif uid == ID_SUBSTREAMS_INFO:
                yield SubStreamInfo(self, "substreams_info", ID_INFO[ID_SUBSTREAMS_INFO])
            else:
                # Unknown ID: log it and stop parsing this header.
                self.info("Unexpected ID (%i)" % uid)
                break
+
class IDHeader(FieldSet):
    """Plain (non-encoded) archive header (parsing not implemented).

    Mirrors SubStreamInfo: yield the ID byte, then abort with a
    ParserError since the structure is not handled yet.
    """
    def createFields(self):
        yield Enum(UInt8(self, "id"), ID_INFO)
        # BUG FIX: the original built the exception without raising it, so
        # field generation silently ended instead of reporting the error.
        raise ParserError("IDHeader not implemented")
+
class NextHeader(FieldSet):
    """Header area at the end of the archive; dispatches on its type byte."""
    def __init__(self, parent, name, desc="Next header"):
        FieldSet.__init__(self, parent, name, desc)
        # Size in bits, taken from the start header (value is in bytes).
        self._size = 8*self["/signature/start_hdr/next_hdr_size"].value
    # Less work, as much interpretable information as the other
    # version... what an obnoxious format
    def createFields2(self):
        # NOTE(review): apparently an unused alternative implementation;
        # it passes self._size-1 (a bit count) where RawBytes takes a byte
        # count -- verify before ever switching to it.
        yield Enum(UInt8(self, "header_type"), ID_INFO)
        yield RawBytes(self, "header_data", self._size-1)
    def createFields(self):
        uid = self.stream.readBits(self.absolute_address, 8, LITTLE_ENDIAN)
        if uid == ID_HEADER:
            yield IDHeader(self, "header", ID_INFO[ID_HEADER])
        elif uid == ID_ENCODED_HEADER:
            yield EncodedHeader(self, "encoded_hdr", ID_INFO[ID_ENCODED_HEADER])
            # Game Over: this is usually encoded using LZMA, not copy
            # See SzReadAndDecodePackedStreams/SzDecode being called with the
            # data position from "/next_hdr/encoded_hdr/pack_info/pack_pos"
            # We should process further, yet we can't...
        else:
            # BUG FIX: the exception was created but never raised, so an
            # unknown ID was silently ignored.
            raise ParserError("Unexpected ID %u" % uid)
        size = self._size - self.current_size
        if size > 0:
            yield RawBytes(self, "next_hdr_data", size//8, "Next header's data")
+
class Body(FieldSet):
    """Archive body: everything between the signature and the next header."""
    def __init__(self, parent, name, desc="Body data"):
        FieldSet.__init__(self, parent, name, desc)
        # The body spans up to the next header (offset in bytes -> bits).
        self._size = 8*self["/signature/start_hdr/next_hdr_offset"].value
    def createFields(self):
        if "encoded_hdr" in self["/next_hdr/"]:
            # Total size of all packed streams (compressed file information).
            pack_size = sum([s.value for s in self.array("/next_hdr/encoded_hdr/pack_info/pack_size")])
            body_size = self["/next_hdr/encoded_hdr/pack_info/pack_pos"].value
            yield RawBytes(self, "compressed_data", body_size, "Compressed data")
            # Here we could check if copy method was used to "compress" it,
            # but this never happens, so just output "compressed file info"
            yield RawBytes(self, "compressed_file_info", pack_size,
                           "Compressed file information")
            # Whatever remains between the packed streams and the next header.
            size = (self._size//8) - pack_size - body_size
            if size > 0:
                yield RawBytes(self, "unknown_data", size)
        elif "header" in self["/next_hdr"]:
            yield RawBytes(self, "compressed_data", self._size//8, "Compressed data")
+
class StartHeader(FieldSet):
    """7z start header: location, size and CRC of the next header."""
    static_size = 160  # 8 + 8 + 4 bytes, in bits
    def createFields(self):
        offset = UInt64(self, "next_hdr_offset", "Next header offset")
        yield textHandler(offset, hexadecimal)
        yield UInt64(self, "next_hdr_size", "Next header size")
        crc = UInt32(self, "next_hdr_crc", "Next header CRC")
        yield textHandler(crc, hexadecimal)
+
class SignatureHeader(FieldSet):
    """Fixed-size signature header: magic, version, CRC, then start header."""
    static_size = 96 + StartHeader.static_size  # 12 + 20 bytes, in bits
    def createFields(self):
        yield Bytes(self, "signature", 6, "Signature Header")
        yield UInt8(self, "major_ver", "Archive major version")
        yield UInt8(self, "minor_ver", "Archive minor version")
        yield textHandler(UInt32(self, "start_hdr_crc",
            "Start header CRC"), hexadecimal)
        yield StartHeader(self, "start_hdr", "Start header")
+
class SevenZipParser(Parser):
    """Top-level 7z archive parser: signature, body, then next header."""
    PARSER_TAGS = {
        "id": "7zip",
        "category": "archive",
        "file_ext": ("7z",),
        "mime": (u"application/x-7z-compressed",),
        "min_size": 32*8,
        "magic": (("7z\xbc\xaf\x27\x1c", 0),),
        "description": "Compressed archive in 7z format"
    }
    endian = LITTLE_ENDIAN

    def createFields(self):
        yield SignatureHeader(self, "signature", "Signature Header")
        yield Body(self, "body_data")
        yield NextHeader(self, "next_hdr")

    def validate(self):
        # Check the 6-byte magic number at the start of the stream.
        if self.stream.readBytes(0,6) != "7z\xbc\xaf'\x1c":
            return "Invalid signature"
        return True

    def createContentSize(self):
        """Return the total file size in bits, from the start header."""
        size = self["/signature/start_hdr/next_hdr_offset"].value
        size += self["/signature/start_hdr/next_hdr_size"].value
        size += 12 # Signature size
        size += 20 # Start header size
        return size*8
diff --git a/lib/hachoir_parser/archive/tar.py b/lib/hachoir_parser/archive/tar.py
new file mode 100644
index 0000000000000000000000000000000000000000..08a9040be689fc6c3cded8ad2302b7e2495a2256
--- /dev/null
+++ b/lib/hachoir_parser/archive/tar.py
@@ -0,0 +1,124 @@
+"""
+Tar archive parser.
+
+Author: Victor Stinner
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    Enum, UInt8, SubFile, String, NullBytes)
+from hachoir_core.tools import humanFilesize, paddingSize, timestampUNIX
+from hachoir_core.endian import BIG_ENDIAN
+import re
+
class FileEntry(FieldSet):
    """One tar member: a 512-byte header block, then the (padded) content."""
    # Entry type byte -> human-readable name.
    type_name = {
        # 48 is "0", 49 is "1", ...
         0: u"Normal disk file (old format)",
        48: u"Normal disk file",
        49: u"Link to previously dumped file",
        50: u"Symbolic link",
        51: u"Character special file",
        52: u"Block special file",
        53: u"Directory",
        54: u"FIFO special file",
        55: u"Contiguous file"
    }

    def getOctal(self, name):
        """Return the integer value of the octal-text field *name*."""
        return self.octal2int(self[name].value)

    def getDatetime(self):
        """
        Create modification date as Unicode string, may raise ValueError.
        """
        timestamp = self.getOctal("mtime")
        return timestampUNIX(timestamp)

    def createFields(self):
        yield String(self, "name", 100, "Name", strip="\0", charset="ISO-8859-1")
        yield String(self, "mode", 8, "Mode", strip=" \0", charset="ASCII")
        yield String(self, "uid", 8, "User ID", strip=" \0", charset="ASCII")
        yield String(self, "gid", 8, "Group ID", strip=" \0", charset="ASCII")
        yield String(self, "size", 12, "Size", strip=" \0", charset="ASCII")
        yield String(self, "mtime", 12, "Modification time", strip=" \0", charset="ASCII")
        yield String(self, "check_sum", 8, "Check sum", strip=" \0", charset="ASCII")
        yield Enum(UInt8(self, "type", "Type"), self.type_name)
        yield String(self, "lname", 100, "Link name", strip=" \0", charset="ISO-8859-1")
        yield String(self, "magic", 8, "Magic", strip=" \0", charset="ASCII")
        yield String(self, "uname", 32, "User name", strip=" \0", charset="ISO-8859-1")
        yield String(self, "gname", 32, "Group name", strip=" \0", charset="ISO-8859-1")
        yield String(self, "devmajor", 8, "Dev major", strip=" \0", charset="ASCII")
        yield String(self, "devminor", 8, "Dev minor", strip=" \0", charset="ASCII")
        yield NullBytes(self, "padding", 167, "Padding (zero)")

        # Content follows the header when the size field is non-zero.
        filesize = self.getOctal("size")
        if filesize:
            yield SubFile(self, "content", filesize, filename=self["name"].value)

        # Entries are aligned on 512-byte boundaries.
        size = paddingSize(self.current_size//8, 512)
        if size:
            yield NullBytes(self, "padding_end", size, "Padding (512 align)")

    def convertOctal(self, chunk):
        """Convert an octal-text chunk (field) to its integer value."""
        return self.octal2int(chunk.value)

    def isEmpty(self):
        """True for the all-zero terminator block (empty name)."""
        return self["name"].value == ""

    def octal2int(self, text):
        """Parse octal text; malformed or empty fields count as 0."""
        try:
            return int(text, 8)
        except ValueError:
            return 0

    def createDescription(self):
        if self.isEmpty():
            desc = "(terminator, empty header)"
        else:
            filename = self["name"].value
            filesize = humanFilesize(self.getOctal("size"))
            desc = "(%s: %s, %s)" % \
                (filename, self["type"].display, filesize)
        return "Tar File " + desc
+
class TarFile(Parser):
    """Tar archive parser: FileEntry blocks ended by an empty terminator."""
    endian = BIG_ENDIAN
    PARSER_TAGS = {
        "id": "tar",
        "category": "archive",
        "file_ext": ("tar",),
        "mime": (u"application/x-tar", u"application/x-gtar"),
        "min_size": 512*8,
        "magic": (("ustar  \0", 257*8),),
        "subfile": "skip",
        "description": "TAR archive",
    }
    # Magic at byte offset 257: "ustar" (POSIX) or spaces/NULs (old format).
    _sign = re.compile("ustar *\0|[ \0]*$")

    def validate(self):
        if not self._sign.match(self.stream.readBytes(257*8, 8)):
            return "Invalid magic number"
        if self[0].name == "terminator":
            return "Don't contain any file"
        # The first entry's numeric fields must be valid octal text.
        try:
            int(self["file[0]/uid"].value, 8)
            int(self["file[0]/gid"].value, 8)
            int(self["file[0]/size"].value, 8)
        except ValueError:
            return "Invalid file size"
        return True

    def createFields(self):
        while not self.eof:
            field = FileEntry(self, "file[]")
            if field.isEmpty():
                # An all-zero header block marks the end of the archive.
                yield NullBytes(self, "terminator", 512)
                break
            yield field
        if self.current_size < self._size:
            yield self.seekBit(self._size, "end")

    def createContentSize(self):
        """Content ends right after the terminator block (in bits)."""
        return self["terminator"].address + self["terminator"].size
+
diff --git a/lib/hachoir_parser/archive/zip.py b/lib/hachoir_parser/archive/zip.py
new file mode 100644
index 0000000000000000000000000000000000000000..b3cd54a59965c2d6092075077f8c6ee298fa91fc
--- /dev/null
+++ b/lib/hachoir_parser/archive/zip.py
@@ -0,0 +1,427 @@
+"""
+Zip splitter.
+
+Status: can read most important headers
+Authors: Christophe Gisquet and Victor Stinner
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, ParserError,
+    Bit, Bits, Enum,
+    TimeDateMSDOS32, SubFile,
+    UInt8, UInt16, UInt32, UInt64,
+    String, PascalString16,
+    RawBytes)
+from hachoir_core.text_handler import textHandler, filesizeHandler, hexadecimal
+from hachoir_core.error import HACHOIR_ERRORS
+from hachoir_core.tools import makeUnicode
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_parser.common.deflate import Deflate
+
# Upper bound (in bytes) when searching for the end-of-central-directory
# record in createContentSize().
MAX_FILESIZE = 1000 * 1024 * 1024

# Compression method identifiers from the local/central file headers.
COMPRESSION_DEFLATE = 8
COMPRESSION_METHOD = {
     0: u"no compression",
     1: u"Shrunk",
     2: u"Reduced (factor 1)",
     3: u"Reduced (factor 2)",
     4: u"Reduced (factor 3)",
     5: u"Reduced (factor 4)",
     6: u"Imploded",
     7: u"Tokenizing",
     8: u"Deflate",
     9: u"Deflate64",
    10: u"PKWARE Imploding",
    11: u"Reserved by PKWARE",
    12: u"File is compressed using BZIP2 algorithm",
    13: u"Reserved by PKWARE",
    14: u"LZMA (EFS)",
    15: u"Reserved by PKWARE",
    16: u"Reserved by PKWARE",
    17: u"Reserved by PKWARE",
    18: u"File is compressed using IBM TERSE (new)",
    19: u"IBM LZ77 z Architecture (PFS)",
    98: u"PPMd version I, Rev 1",
}
+
def ZipRevision(field):
    """Render a ZIP version byte as "major.minor" (e.g. 20 -> "2.0")."""
    major, minor = divmod(field.value, 10)
    return "%u.%u" % (major, minor)
+
class ZipVersion(FieldSet):
    """Two-byte version field: version number (tenths) plus host OS."""
    static_size = 16
    # Host OS byte -> name.
    HOST_OS = {
         0: u"FAT file system (DOS, OS/2, NT)",
         1: u"Amiga",
         2: u"VMS (VAX or Alpha AXP)",
         3: u"Unix",
         4: u"VM/CMS",
         5: u"Atari",
         6: u"HPFS file system (OS/2, NT 3.x)",
         7: u"Macintosh",
         8: u"Z-System",
         9: u"CP/M",
        10: u"TOPS-20",
        11: u"NTFS file system (NT)",
        12: u"SMS/QDOS",
        13: u"Acorn RISC OS",
        14: u"VFAT file system (Win95, NT)",
        15: u"MVS",
        16: u"BeOS (BeBox or PowerMac)",
        17: u"Tandem",
    }
    def createFields(self):
        yield textHandler(UInt8(self, "zip_version", "ZIP version"), ZipRevision)
        yield Enum(UInt8(self, "host_os", "ZIP Host OS"), self.HOST_OS)
+
class ZipGeneralFlags(FieldSet):
    """General-purpose bit flags; bits 1-2 depend on the compression method."""
    static_size = 16
    def createFields(self):
        # Need the compression info from the parent, and that is the byte following
        method = self.stream.readBits(self.absolute_address+16, 16, LITTLE_ENDIAN)

        yield Bits(self, "unused[]", 2, "Unused")
        yield Bit(self, "encrypted_central_dir", "Selected data values in the Local Header are masked")
        yield Bit(self, "incomplete", "Reserved by PKWARE for enhanced compression.")
        yield Bit(self, "uses_unicode", "Filename and comments are in UTF-8")
        yield Bits(self, "unused[]", 4, "Unused")
        yield Bit(self, "strong_encrypt", "Strong encryption (version >= 50)")
        yield Bit(self, "is_patched", "File is compressed with patched data?")
        yield Bit(self, "enhanced_deflate", "Reserved for use with method 8")
        yield Bit(self, "has_descriptor",
                  "Compressed data followed by descriptor?")
        # Bits 1-2 of the flag word: meaning varies with the method.
        if method == 6:
            yield Bit(self, "use_8k_sliding", "Use 8K sliding dictionary (instead of 4K)")
            yield Bit(self, "use_3shannon", "Use a 3 Shannon-Fano tree (instead of 2 Shannon-Fano)")
        elif method in (8, 9):
            NAME = {
                0: "Normal compression",
                1: "Maximum compression",
                2: "Fast compression",
                3: "Super Fast compression"
            }
            yield Enum(Bits(self, "method", 2), NAME)
        elif method == 14: #LZMA
            yield Bit(self, "lzma_eos", "LZMA stream is ended with a EndOfStream marker")
            yield Bit(self, "unused[]")
        else:
            yield Bits(self, "compression_info", 2)
        yield Bit(self, "is_encrypted", "File is encrypted?")
+
class ExtraField(FieldSet):
    """One (id, size, data) record of a header's "extra" area."""
    # Known extra-field IDs -> description.
    EXTRA_FIELD_ID = {
        0x0007: "AV Info",
        0x0009: "OS/2 extended attributes (also Info-ZIP)",
        0x000a: "PKWARE Win95/WinNT FileTimes", # undocumented!
        0x000c: "PKWARE VAX/VMS (also Info-ZIP)",
        0x000d: "PKWARE Unix",
        0x000f: "Patch Descriptor",
        0x07c8: "Info-ZIP Macintosh (old, J. Lee)",
        0x2605: "ZipIt Macintosh (first version)",
        0x2705: "ZipIt Macintosh v 1.3.5 and newer (w/o full filename)",
        0x334d: "Info-ZIP Macintosh (new, D. Haase Mac3 field)",
        0x4341: "Acorn/SparkFS (David Pilling)",
        0x4453: "Windows NT security descriptor (binary ACL)",
        0x4704: "VM/CMS",
        0x470f: "MVS",
        0x4b46: "FWKCS MD5 (third party, see below)",
        0x4c41: "OS/2 access control list (text ACL)",
        0x4d49: "Info-ZIP VMS (VAX or Alpha)",
        0x5356: "AOS/VS (binary ACL)",
        0x5455: "extended timestamp",
        0x5855: "Info-ZIP Unix (original; also OS/2, NT, etc.)",
        0x6542: "BeOS (BeBox, PowerMac, etc.)",
        0x756e: "ASi Unix",
        0x7855: "Info-ZIP Unix (new)",
        0xfb4a: "SMS/QDOS",
    }
    def createFields(self):
        yield Enum(UInt16(self, "field_id", "Extra field ID"),
                   self.EXTRA_FIELD_ID)
        size = UInt16(self, "field_data_size", "Extra field data size")
        yield size
        if size.value > 0:
            # BUG FIX: pass the integer byte count (size.value), not the
            # UInt16 field object itself, as the RawBytes length.
            yield RawBytes(self, "field_data", size.value, "Unknown field data")
+
def ZipStartCommonFields(self):
    """Yield the fields shared by local file headers and central-directory
    entries: version, flags, method, timestamp, CRC, sizes, name lengths."""
    yield ZipVersion(self, "version_needed", "Version needed")
    yield ZipGeneralFlags(self, "flags", "General purpose flag")
    yield Enum(UInt16(self, "compression", "Compression method"),
               COMPRESSION_METHOD)
    yield TimeDateMSDOS32(self, "last_mod", "Last modification file time")
    yield textHandler(UInt32(self, "crc32", "CRC-32"), hexadecimal)
    yield UInt32(self, "compressed_size", "Compressed size")
    yield UInt32(self, "uncompressed_size", "Uncompressed size")
    yield UInt16(self, "filename_length", "Filename length")
    yield UInt16(self, "extra_length", "Extra fields length")
+
def zipGetCharset(self):
    """Charset of filenames/comments for this entry."""
    # UTF-8 when the "uses unicode" general-purpose flag bit is set.
    use_utf8 = self["flags/uses_unicode"].value
    if use_utf8:
        return "UTF-8"
    return "ISO-8859-15"
+
class ZipCentralDirectory(FieldSet):
    """Central-directory entry (PK\\x01\\x02, header read by the caller)."""
    HEADER = 0x02014b50
    def createFields(self):
        yield ZipVersion(self, "version_made_by", "Version made by")
        for field in ZipStartCommonFields(self):
            yield field

        # Check unicode status: charset depends on the flags just parsed.
        charset = zipGetCharset(self)

        yield UInt16(self, "comment_length", "Comment length")
        yield UInt16(self, "disk_number_start", "Disk number start")
        yield UInt16(self, "internal_attr", "Internal file attributes")
        yield UInt32(self, "external_attr", "External file attributes")
        yield UInt32(self, "offset_header", "Relative offset of local header")
        yield String(self, "filename", self["filename_length"].value,
                     "Filename", charset=charset)
        if 0 < self["extra_length"].value:
            yield RawBytes(self, "extra", self["extra_length"].value,
                           "Extra fields")
        if 0 < self["comment_length"].value:
            yield String(self, "comment", self["comment_length"].value,
                         "Comment", charset=charset)

    def createDescription(self):
        return "Central directory: %s" % self["filename"].display
+
class Zip64EndCentralDirectory(FieldSet):
    """ZIP64 end-of-central-directory record (PK\\x06\\x06)."""
    HEADER = 0x06064b50
    def createFields(self):
        yield UInt64(self, "zip64_end_size",
                     "Size of zip64 end of central directory record")
        yield ZipVersion(self, "version_made_by", "Version made by")
        yield ZipVersion(self, "version_needed", "Version needed to extract")
        yield UInt32(self, "number_disk", "Number of this disk")
        yield UInt32(self, "number_disk2",
                     "Number of the disk with the start of the central directory")
        yield UInt64(self, "number_entries",
                     "Total number of entries in the central directory on this disk")
        yield UInt64(self, "number_entries2",
                     "Total number of entries in the central directory")
        yield UInt64(self, "size", "Size of the central directory")
        yield UInt64(self, "offset", "Offset of start of central directory")
        if 0 < self["zip64_end_size"].value:
            yield RawBytes(self, "data_sector", self["zip64_end_size"].value,
                           "zip64 extensible data sector")
+
class ZipEndCentralDirectory(FieldSet):
    """End-of-central-directory record (PK\\x05\\x06)."""
    HEADER = 0x06054b50
    def createFields(self):
        # Four 16-bit counters, then sizes/offsets and the archive comment.
        for name, desc in (
                ("number_disk", "Number of this disk"),
                ("number_disk2", "Number in the central dir"),
                ("total_number_disk", "Total number of entries in this disk"),
                ("total_number_disk2", "Total number of entries in the central dir")):
            yield UInt16(self, name, desc)
        yield UInt32(self, "size", "Size of the central directory")
        yield UInt32(self, "offset", "Offset of start of central directory")
        yield PascalString16(self, "comment", "ZIP comment")
+
class ZipDataDescriptor(FieldSet):
    """Data descriptor (PK\\x07\\x08): CRC plus sizes, three 32-bit words."""
    HEADER_STRING = "\x50\x4B\x07\x08"
    HEADER = 0x08074B50
    static_size = 96  # 3 * 32 bits
    def createFields(self):
        yield textHandler(UInt32(self, "file_crc32",
            "Checksum (CRC32)"), hexadecimal)
        yield filesizeHandler(UInt32(self, "file_compressed_size",
            "Compressed size (bytes)"))
        yield filesizeHandler(UInt32(self, "file_uncompressed_size",
             "Uncompressed size (bytes)"))
+
class FileEntry(FieldSet):
    """Local file header (PK\\x03\\x04) plus its (possibly compressed) data."""
    HEADER = 0x04034B50
    filename = None  # set in createFields() once the name is parsed

    def data(self, size):
        """Return the payload field: a plain SubFile when stored (method 0),
        otherwise a SubFile wrapped in Deflate when the method is deflate."""
        compression = self["compression"].value
        if compression == 0:
            return SubFile(self, "data", size, filename=self.filename)
        compressed = SubFile(self, "compressed_data", size, filename=self.filename)
        if compression == COMPRESSION_DEFLATE:
            return Deflate(compressed)
        else:
            return compressed

    def resync(self):
        # Non-seekable output, search the next data descriptor
        size = self.stream.searchBytesLength(ZipDataDescriptor.HEADER_STRING, False,
                                            self.absolute_address+self.current_size)
        if size <= 0:
            raise ParserError("Couldn't resync to %s" %
                              ZipDataDescriptor.HEADER_STRING)
        yield self.data(size)
        yield textHandler(UInt32(self, "header[]", "Header"), hexadecimal)
        data_desc = ZipDataDescriptor(self, "data_desc", "Data descriptor")
        #self.info("Resynced!")
        yield data_desc
        # The above could be checked anytime, but we prefer trying parsing
        # than aborting
        if self["crc32"].value == 0 and \
            data_desc["file_compressed_size"].value != size:
            raise ParserError("Bad resync: position=>%i but data_desc=>%i" %
                              (size, data_desc["file_compressed_size"].value))

    def createFields(self):
        for field in ZipStartCommonFields(self):
            yield field
        length = self["filename_length"].value


        if length:
            filename = String(self, "filename", length, "Filename",
                              charset=zipGetCharset(self))
            yield filename
            self.filename = filename.value
        if self["extra_length"].value:
            yield RawBytes(self, "extra", self["extra_length"].value, "Extra")
        size = self["compressed_size"].value
        if size > 0:
            yield self.data(size)
        elif self["flags/incomplete"].value:
            # Size unknown (streamed output): resync on the data descriptor.
            for field in self.resync():
                yield field
        if self["flags/has_descriptor"].value:
            yield ZipDataDescriptor(self, "data_desc", "Data descriptor")

    def createDescription(self):
        return "File entry: %s (%s)" % \
            (self["filename"].value, self["compressed_size"].display)

    def validate(self):
        """Return an error message, or "" when the entry looks sane."""
        if self["compression"].value not in COMPRESSION_METHOD:
            return "Unknown compression method (%u)" % self["compression"].value
        return ""
+
class ZipSignature(FieldSet):
    """Digital signature record (PK\\x05\\x05)."""
    HEADER = 0x05054B50
    def createFields(self):
        yield PascalString16(self, "signature", "Signature")
+
class Zip64EndCentralDirectoryLocator(FieldSet):
    """ZIP64 end-of-central-directory locator record (PK\\x06\\x07)."""
    HEADER = 0x07064b50
    def createFields(self):
        yield UInt32(self, "disk_number", \
                     "Number of the disk with the start of the zip64 end of central directory")
        yield UInt64(self, "relative_offset", \
                     "Relative offset of the zip64 end of central directory record")
        yield UInt32(self, "disk_total_number", "Total number of disks")
+
+
+class ZipFile(Parser):
+    endian = LITTLE_ENDIAN
+    MIME_TYPES = {
+        # Default ZIP archive
+        u"application/zip": "zip",
+        u"application/x-zip": "zip",
+
+        # Java archive (JAR)
+        u"application/x-jar": "jar",
+        u"application/java-archive": "jar",
+
+        # OpenOffice 1.0
+        u"application/vnd.sun.xml.calc": "sxc",
+        u"application/vnd.sun.xml.draw": "sxd",
+        u"application/vnd.sun.xml.impress": "sxi",
+        u"application/vnd.sun.xml.writer": "sxw",
+        u"application/vnd.sun.xml.math": "sxm",
+
+        # OpenOffice 1.0 (template)
+        u"application/vnd.sun.xml.calc.template": "stc",
+        u"application/vnd.sun.xml.draw.template": "std",
+        u"application/vnd.sun.xml.impress.template": "sti",
+        u"application/vnd.sun.xml.writer.template": "stw",
+        u"application/vnd.sun.xml.writer.global": "sxg",
+
+        # OpenDocument
+        u"application/vnd.oasis.opendocument.chart": "odc",
+        u"application/vnd.oasis.opendocument.image": "odi",
+        u"application/vnd.oasis.opendocument.database": "odb",
+        u"application/vnd.oasis.opendocument.formula": "odf",
+        u"application/vnd.oasis.opendocument.graphics": "odg",
+        u"application/vnd.oasis.opendocument.presentation": "odp",
+        u"application/vnd.oasis.opendocument.spreadsheet": "ods",
+        u"application/vnd.oasis.opendocument.text": "odt",
+        u"application/vnd.oasis.opendocument.text-master": "odm",
+
+        # OpenDocument (template)
+        u"application/vnd.oasis.opendocument.graphics-template": "otg",
+        u"application/vnd.oasis.opendocument.presentation-template": "otp",
+        u"application/vnd.oasis.opendocument.spreadsheet-template": "ots",
+        u"application/vnd.oasis.opendocument.text-template": "ott",
+    }
+    PARSER_TAGS = {
+        "id": "zip",
+        "category": "archive",
+        "file_ext": tuple(MIME_TYPES.itervalues()),
+        "mime": tuple(MIME_TYPES.iterkeys()),
+        "magic": (("PK\3\4", 0),),
+        "subfile": "skip",
+        "min_size": (4 + 26)*8, # header + file entry
+        "description": "ZIP archive"
+    }
+
+    def validate(self):
+        if self["header[0]"].value != FileEntry.HEADER:
+            return "Invalid magic"
+        try:
+            file0 = self["file[0]"]
+        except HACHOIR_ERRORS, err:
+            return "Unable to get file #0"
+        err = file0.validate()
+        if err:
+            return "File #0: %s" % err
+        return True
+
+    def createFields(self):
+        # File data
+        self.signature = None
+        self.central_directory = []
+        while not self.eof:
+            header = textHandler(UInt32(self, "header[]", "Header"), hexadecimal)
+            yield header
+            header = header.value
+            if header == FileEntry.HEADER:
+                yield FileEntry(self, "file[]")
+            elif header == ZipDataDescriptor.HEADER:
+                yield ZipDataDescriptor(self, "spanning[]")
+            elif header == 0x30304b50:
+                yield ZipDataDescriptor(self, "temporary_spanning[]")
+            elif header == ZipCentralDirectory.HEADER:
+                yield ZipCentralDirectory(self, "central_directory[]")
+            elif header == ZipEndCentralDirectory.HEADER:
+                yield ZipEndCentralDirectory(self, "end_central_directory", "End of central directory")
+            elif header == Zip64EndCentralDirectory.HEADER:
+                yield Zip64EndCentralDirectory(self, "end64_central_directory", "ZIP64 end of central directory")
+            elif header == ZipSignature.HEADER:
+                yield ZipSignature(self, "signature", "Signature")
+            elif header == Zip64EndCentralDirectoryLocator.HEADER:
+                yield Zip64EndCentralDirectoryLocator(self, "end_locator", "ZIP64 Enf of central directory locator")
+            else:
+                raise ParserError("Error, unknown ZIP header (0x%08X)." % header)
+
+    def createMimeType(self):
+        if self["file[0]/filename"].value == "mimetype":
+            return makeUnicode(self["file[0]/data"].value)
+        else:
+            return u"application/zip"
+
+    def createFilenameSuffix(self):
+        if self["file[0]/filename"].value == "mimetype":
+            mime = self["file[0]/compressed_data"].value
+            if mime in self.MIME_TYPES:
+                return "." + self.MIME_TYPES[mime]
+        return ".zip"
+
+    def createContentSize(self):
+        start = 0
+        end = MAX_FILESIZE * 8
+        end = self.stream.searchBytes("PK\5\6", start, end)
+        if end is not None:
+            return end + 22*8
+        return None
+
diff --git a/lib/hachoir_parser/audio/8svx.py b/lib/hachoir_parser/audio/8svx.py
new file mode 100644
index 0000000000000000000000000000000000000000..109d03dae0348e4ab3d446e2ff16d98b316aea5c
--- /dev/null
+++ b/lib/hachoir_parser/audio/8svx.py
@@ -0,0 +1,126 @@
+"""
+8SVX (Amiga 8-bit sampled voice) audio file parser.
+
+Author: Victor Stinner
+Creation: 27 december 2006
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    UInt16, UInt32, Float80, TimestampMac32,
+    RawBytes, NullBytes,
+    String, Enum, PascalString32)
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_core.text_handler import filesizeHandler
+from hachoir_core.tools import alignValue
+from hachoir_parser.audio.id3 import ID3v2
+
+CODEC_NAME = {
+    'ACE2': u"ACE 2-to-1",
+    'ACE8': u"ACE 8-to-3",
+    'MAC3': u"MAC 3-to-1",
+    'MAC6': u"MAC 6-to-1",
+    'NONE': u"None",
+    'sowt': u"Little-endian, no compression",
+}
+
+class Comment(FieldSet):
+    def createFields(self):
+        yield TimestampMac32(self, "timestamp")
+        yield PascalString32(self, "text")
+
+def parseText(self):
+    yield String(self, "text", self["size"].value)
+
+def parseID3(self):
+    yield ID3v2(self, "id3v2", size=self["size"].value*8)
+
+def parseComment(self):
+    yield UInt16(self, "nb_comment")
+    for index in xrange(self["nb_comment"].value):
+        yield Comment(self, "comment[]")
+
+def parseCommon(self):
+    yield UInt16(self, "nb_channel")
+    yield UInt32(self, "nb_sample")
+    yield UInt16(self, "sample_size")
+    yield Float80(self, "sample_rate")
+    yield Enum(String(self, "codec", 4, strip="\0", charset="ASCII"), CODEC_NAME)
+
+def parseVersion(self):
+    yield TimestampMac32(self, "timestamp")
+
+def parseSound(self):
+    yield UInt32(self, "offset")
+    yield UInt32(self, "block_size")
+    size = (self.size - self.current_size) // 8
+    if size:
+        yield RawBytes(self, "data", size)
+
+class Chunk(FieldSet):
+    TAG_INFO = {
+        'COMM': ('common', "Common chunk", parseCommon),
+        'COMT': ('comment', "Comment", parseComment),
+        'NAME': ('name', "Name", parseText),
+        'AUTH': ('author', "Author", parseText),
+        'FVER': ('version', "Version", parseVersion),
+        'SSND': ('sound', "Sound data", parseSound),
+        'ID3 ': ('id3', "ID3", parseID3),
+    }
+
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+        self._size = (8 + alignValue(self["size"].value, 2)) * 8
+        tag = self["type"].value
+        if tag in self.TAG_INFO:
+            self._name, self._description, self._parser = self.TAG_INFO[tag]
+        else:
+            self._parser = None
+
+    def createFields(self):
+        yield String(self, "type", 4, "Signature (FORM)", charset="ASCII")
+        yield filesizeHandler(UInt32(self, "size"))
+        size = self["size"].value
+        if size:
+            if self._parser:
+                for field in self._parser(self):
+                    yield field
+                if size % 2:
+                    yield NullBytes(self, "padding", 1)
+            else:
+                yield RawBytes(self, "data", size)
+
+class HeightSVX(Parser):
+    PARSER_TAGS = {
+        "id": "8svx",
+        "category": "audio",
+        "file_ext": ("8svx",),
+        "mime": (u"audio/x-aiff",),
+        "min_size": 12*8,
+        "description": "8SVX (audio) format"
+    }
+    endian = BIG_ENDIAN
+
+    def validate(self):
+        if self.stream.readBytes(0, 4) != "FORM":
+            return "Invalid signature"
+        if self.stream.readBytes(8*8, 4) != "8SVX":
+            return "Invalid type"
+        return True
+
+    def createFields(self):
+        yield String(self, "signature", 4, "Signature (FORM)", charset="ASCII")
+        yield filesizeHandler(UInt32(self, "filesize"))
+        yield String(self, "type", 4, "Form type (AIFF or AIFC)", charset="ASCII")
+        while not self.eof:
+            yield Chunk(self, "chunk[]")
+
+    def createDescription(self):
+        if self["type"].value == "AIFC":
+            return "Audio Interchange File Format Compressed (AIFC)"
+        else:
+            return "Audio Interchange File Format (AIFF)"
+
+    def createContentSize(self):
+        return self["filesize"].value * 8
+
diff --git a/lib/hachoir_parser/audio/__init__.py b/lib/hachoir_parser/audio/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1cc33a23540a5b246fe7a64feff523fe16bc7222
--- /dev/null
+++ b/lib/hachoir_parser/audio/__init__.py
@@ -0,0 +1,12 @@
+from hachoir_parser.audio.aiff import AiffFile
+from hachoir_parser.audio.au import AuFile
+from hachoir_parser.audio.itunesdb import ITunesDBFile
+from hachoir_parser.audio.midi import MidiFile
+from hachoir_parser.audio.mpeg_audio import MpegAudioFile
+from hachoir_parser.audio.real_audio import RealAudioFile
+from hachoir_parser.audio.xm import XMModule
+from hachoir_parser.audio.s3m import S3MModule
+from hachoir_parser.audio.s3m import PTMModule
+from hachoir_parser.audio.mod import AmigaModule
+from hachoir_parser.audio.flac import FlacParser
+
diff --git a/lib/hachoir_parser/audio/aiff.py b/lib/hachoir_parser/audio/aiff.py
new file mode 100644
index 0000000000000000000000000000000000000000..d8f41696715be935615bcb52d64e62f34b59cf66
--- /dev/null
+++ b/lib/hachoir_parser/audio/aiff.py
@@ -0,0 +1,127 @@
+"""
+Audio Interchange File Format (AIFF) parser.
+
+Author: Victor Stinner
+Creation: 27 december 2006
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    UInt16, UInt32, Float80, TimestampMac32,
+    RawBytes, NullBytes,
+    String, Enum, PascalString32)
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_core.text_handler import filesizeHandler
+from hachoir_core.tools import alignValue
+from hachoir_parser.audio.id3 import ID3v2
+
+CODEC_NAME = {
+    'ACE2': u"ACE 2-to-1",
+    'ACE8': u"ACE 8-to-3",
+    'MAC3': u"MAC 3-to-1",
+    'MAC6': u"MAC 6-to-1",
+    'NONE': u"None",
+    'sowt': u"Little-endian, no compression",
+}
+
+class Comment(FieldSet):
+    def createFields(self):
+        yield TimestampMac32(self, "timestamp")
+        yield PascalString32(self, "text")
+
+def parseText(self):
+    yield String(self, "text", self["size"].value)
+
+def parseID3(self):
+    yield ID3v2(self, "id3v2", size=self["size"].value*8)
+
+def parseComment(self):
+    yield UInt16(self, "nb_comment")
+    for index in xrange(self["nb_comment"].value):
+        yield Comment(self, "comment[]")
+
+def parseCommon(self):
+    yield UInt16(self, "nb_channel")
+    yield UInt32(self, "nb_sample")
+    yield UInt16(self, "sample_size")
+    yield Float80(self, "sample_rate")
+    yield Enum(String(self, "codec", 4, strip="\0", charset="ASCII"), CODEC_NAME)
+
+def parseVersion(self):
+    yield TimestampMac32(self, "timestamp")
+
+def parseSound(self):
+    yield UInt32(self, "offset")
+    yield UInt32(self, "block_size")
+    size = (self.size - self.current_size) // 8
+    if size:
+        yield RawBytes(self, "data", size)
+
+class Chunk(FieldSet):
+    TAG_INFO = {
+        'COMM': ('common', "Common chunk", parseCommon),
+        'COMT': ('comment', "Comment", parseComment),
+        'NAME': ('name', "Name", parseText),
+        'AUTH': ('author', "Author", parseText),
+        'FVER': ('version', "Version", parseVersion),
+        'SSND': ('sound', "Sound data", parseSound),
+        'ID3 ': ('id3', "ID3", parseID3),
+    }
+
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+        self._size = (8 + alignValue(self["size"].value, 2)) * 8
+        tag = self["type"].value
+        if tag in self.TAG_INFO:
+            self._name, self._description, self._parser = self.TAG_INFO[tag]
+        else:
+            self._parser = None
+
+    def createFields(self):
+        yield String(self, "type", 4, "Signature (FORM)", charset="ASCII")
+        yield filesizeHandler(UInt32(self, "size"))
+        size = self["size"].value
+        if size:
+            if self._parser:
+                for field in self._parser(self):
+                    yield field
+                if size % 2:
+                    yield NullBytes(self, "padding", 1)
+            else:
+                yield RawBytes(self, "data", size)
+
+class AiffFile(Parser):
+    PARSER_TAGS = {
+        "id": "aiff",
+        "category": "audio",
+        "file_ext": ("aif", "aiff", "aifc"),
+        "mime": (u"audio/x-aiff",),
+        "magic_regex": (("FORM.{4}AIF[CF]", 0),),
+        "min_size": 12*8,
+        "description": "Audio Interchange File Format (AIFF)"
+    }
+    endian = BIG_ENDIAN
+
+    def validate(self):
+        if self.stream.readBytes(0, 4) != "FORM":
+            return "Invalid signature"
+        if self.stream.readBytes(8*8, 4) not in ("AIFF", "AIFC"):
+            return "Invalid type"
+        return True
+
+    def createFields(self):
+        yield String(self, "signature", 4, "Signature (FORM)", charset="ASCII")
+        yield filesizeHandler(UInt32(self, "filesize"))
+        yield String(self, "type", 4, "Form type (AIFF or AIFC)", charset="ASCII")
+        while not self.eof:
+            yield Chunk(self, "chunk[]")
+
+    def createDescription(self):
+        if self["type"].value == "AIFC":
+            return "Audio Interchange File Format Compressed (AIFC)"
+        else:
+            return "Audio Interchange File Format (AIFF)"
+
+    def createContentSize(self):
+        return self["filesize"].value * 8
+
diff --git a/lib/hachoir_parser/audio/au.py b/lib/hachoir_parser/audio/au.py
new file mode 100644
index 0000000000000000000000000000000000000000..ab9d9c11a53d3533ad1f97d1c04d98e00440e2c1
--- /dev/null
+++ b/lib/hachoir_parser/audio/au.py
@@ -0,0 +1,88 @@
+"""
+AU audio file parser
+
+Author: Victor Stinner
+Creation: 12 july 2006
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import UInt32, Enum, String, RawBytes
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_core.text_handler import displayHandler, filesizeHandler
+from hachoir_core.tools import createDict, humanFrequency
+
+class AuFile(Parser):
+    PARSER_TAGS = {
+        "id": "sun_next_snd",
+        "category": "audio",
+        "file_ext": ("au", "snd"),
+        "mime": (u"audio/basic",),
+        "min_size": 24*8,
+        "magic": ((".snd", 0),),
+        "description": "Sun/NeXT audio"
+    }
+    endian = BIG_ENDIAN
+
+    CODEC_INFO = {
+        1: (8,    u"8-bit ISDN u-law"),
+        2: (8,    u"8-bit linear PCM"),
+        3: (16,   u"16-bit linear PCM"),
+        4: (24,   u"24-bit linear PCM"),
+        5: (32,   u"32-bit linear PCM"),
+        6: (32,   u"32-bit IEEE floating point"),
+        7: (64,   u"64-bit IEEE floating point"),
+        8: (None, u"Fragmented sample data"),
+        9: (None, u"DSP program"),
+       10: (8,    u"8-bit fixed point"),
+       11: (16,   u"16-bit fixed point"),
+       12: (24,   u"24-bit fixed point"),
+       13: (32,   u"32-bit fixed point"),
+       18: (16,   u"16-bit linear with emphasis"),
+       19: (16,   u"16-bit linear compressed"),
+       20: (16,   u"16-bit linear with emphasis and compression"),
+       21: (None, u"Music kit DSP commands"),
+       23: (None, u"4-bit ISDN u-law compressed (CCITT G.721 ADPCM)"),
+       24: (None, u"ITU-T G.722 ADPCM"),
+       25: (None, u"ITU-T G.723 3-bit ADPCM"),
+       26: (None, u"ITU-T G.723 5-bit ADPCM"),
+       27: (8,    u"8-bit ISDN A-law"),
+    }
+
+    # Create bit rate and codec name dictionaries
+    BITS_PER_SAMPLE = createDict(CODEC_INFO, 0)
+    CODEC_NAME = createDict(CODEC_INFO, 1)
+
+    VALID_NB_CHANNEL = set((1,2))   # FIXME: 4, 5, 7, 8 channels are supported?
+
+    def validate(self):
+        if self.stream.readBytes(0, 4) != ".snd":
+            return "Wrong file signature"
+        if self["channels"].value not in self.VALID_NB_CHANNEL:
+            return "Invalid number of channel"
+        return True
+
+    def getBitsPerSample(self):
+        """
+        Get the bit rate (number of bits per sample per channel);
+        may return None if it cannot be computed.
+        """
+        return self.BITS_PER_SAMPLE.get(self["codec"].value)
+
+    def createFields(self):
+        yield String(self, "signature", 4, 'Format signature (".snd")', charset="ASCII")
+        yield UInt32(self, "data_ofs", "Data offset")
+        yield filesizeHandler(UInt32(self, "data_size", "Data size"))
+        yield Enum(UInt32(self, "codec", "Audio codec"), self.CODEC_NAME)
+        yield displayHandler(UInt32(self, "sample_rate", "Number of samples/second"), humanFrequency)
+        yield UInt32(self, "channels", "Number of interleaved channels")
+
+        size = self["data_ofs"].value - self.current_size // 8
+        if 0 < size:
+            yield String(self, "info", size, "Information", strip=" \0", charset="ISO-8859-1")
+
+        size = min(self["data_size"].value, (self.size - self.current_size) // 8)
+        yield RawBytes(self, "audio_data", size, "Audio data")
+
+    def createContentSize(self):
+        return (self["data_ofs"].value + self["data_size"].value) * 8
+
diff --git a/lib/hachoir_parser/audio/flac.py b/lib/hachoir_parser/audio/flac.py
new file mode 100644
index 0000000000000000000000000000000000000000..f739ff70d24635826bcf0d76af520ce019c18470
--- /dev/null
+++ b/lib/hachoir_parser/audio/flac.py
@@ -0,0 +1,157 @@
+"""
+FLAC (audio) parser
+
+Documentation:
+
+ * http://flac.sourceforge.net/format.html
+
+Author: Esteban Loiseau <baal AT tuxfamily.org>
+Creation date: 2008-04-09
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import FieldSet, String, Bit, Bits, UInt16, UInt24, RawBytes, Enum, NullBytes
+from hachoir_core.stream import BIG_ENDIAN, LITTLE_ENDIAN
+from hachoir_core.tools import createDict
+from hachoir_parser.container.ogg import parseVorbisComment
+
+class VorbisComment(FieldSet):
+    endian = LITTLE_ENDIAN
+    createFields = parseVorbisComment
+
+class StreamInfo(FieldSet):
+    static_size = 34*8
+    def createFields(self):
+        yield UInt16(self, "min_block_size", "The minimum block size (in samples) used in the stream")
+        yield UInt16(self, "max_block_size", "The maximum block size (in samples) used in the stream")
+        yield UInt24(self, "min_frame_size", "The minimum frame size (in bytes) used in the stream")
+        yield UInt24(self, "max_frame_size", "The maximum frame size (in bytes) used in the stream")
+        yield Bits(self, "sample_hertz", 20, "Sample rate in Hertz")
+        yield Bits(self, "nb_channel", 3, "Number of channels minus one")
+        yield Bits(self, "bits_per_sample", 5, "Bits per sample minus one")
+        yield Bits(self, "total_samples", 36, "Total samples in stream")
+        yield RawBytes(self, "md5sum", 16, "MD5 signature of the unencoded audio data")
+
+class SeekPoint(FieldSet):
+    def createFields(self):
+        yield Bits(self, "sample_number", 64, "Sample number")
+        yield Bits(self, "offset", 64, "Offset in bytes")
+        yield Bits(self, "nb_sample", 16)
+
+class SeekTable(FieldSet):
+    def createFields(self):
+        while not self.eof:
+            yield SeekPoint(self, "point[]")
+
+class MetadataBlock(FieldSet):
+    "Metadata block field: http://flac.sourceforge.net/format.html#metadata_block"
+
+    BLOCK_TYPES = {
+        0: ("stream_info", u"Stream info", StreamInfo),
+        1: ("padding[]", u"Padding", None),
+        2: ("application[]", u"Application", None),
+        3: ("seek_table", u"Seek table", SeekTable),
+        4: ("comment", u"Vorbis comment", VorbisComment),
+        5: ("cue_sheet[]", u"Cue sheet", None),
+        6: ("picture[]", u"Picture", None),
+    }
+    BLOCK_TYPE_DESC = createDict(BLOCK_TYPES, 1)
+
+    def __init__(self, *args, **kw):
+        FieldSet.__init__(self, *args, **kw)
+        self._size = 32 + self["metadata_length"].value * 8
+        try:
+            key = self["block_type"].value
+            self._name, self._description, self.handler = self.BLOCK_TYPES[key]
+        except KeyError:
+            self.handler = None
+
+    def createFields(self):
+        yield Bit(self, "last_metadata_block", "True if this is the last metadata block")
+        yield Enum(Bits(self, "block_type", 7, "Metadata block header type"), self.BLOCK_TYPE_DESC)
+        yield UInt24(self, "metadata_length", "Length of following metadata in bytes (doesn't include this header)")
+
+        block_type = self["block_type"].value
+        size = self["metadata_length"].value
+        if not size:
+            return
+        try:
+            handler = self.BLOCK_TYPES[block_type][2]
+        except KeyError:
+            handler = None
+        if handler:
+            yield handler(self, "content", size=size*8)
+        elif self["block_type"].value == 1:
+            yield NullBytes(self, "padding", size)
+        else:
+            yield RawBytes(self, "rawdata", size)
+
+class Metadata(FieldSet):
+    def createFields(self):
+        while not self.eof:
+            field = MetadataBlock(self,"metadata_block[]")
+            yield field
+            if field["last_metadata_block"].value:
+                break
+
+class Frame(FieldSet):
+    SAMPLE_RATES = {
+        0: "get from STREAMINFO metadata block",
+        1: "88.2kHz",
+        2: "176.4kHz",
+        3: "192kHz",
+        4: "8kHz",
+        5: "16kHz",
+        6: "22.05kHz",
+        7: "24kHz",
+        8: "32kHz",
+        9: "44.1kHz",
+        10: "48kHz",
+        11: "96kHz",
+        12: "get 8 bit sample rate (in kHz) from end of header",
+        13: "get 16 bit sample rate (in Hz) from end of header",
+        14: "get 16 bit sample rate (in tens of Hz) from end of header",
+    }
+
+    def createFields(self):
+        yield Bits(self, "sync", 14, "Sync code: 11111111111110")
+        yield Bit(self, "reserved[]")
+        yield Bit(self, "blocking_strategy")
+        yield Bits(self, "block_size", 4)
+        yield Enum(Bits(self, "sample_rate", 4), self.SAMPLE_RATES)
+        yield Bits(self, "channel_assign", 4)
+        yield Bits(self, "sample_size", 3)
+        yield Bit(self, "reserved[]")
+        # FIXME: Finish frame header parser
+
+class Frames(FieldSet):
+    def createFields(self):
+        while not self.eof:
+            yield Frame(self, "frame[]")
+            # FIXME: Parse all frames
+            return
+
+class FlacParser(Parser):
+    "Parse FLAC audio files: FLAC is a lossless audio codec"
+    MAGIC = "fLaC\x00"
+    PARSER_TAGS = {
+        "id": "flac",
+        "category": "audio",
+        "file_ext": ("flac",),
+        "mime": (u"audio/x-flac",),
+        "magic": ((MAGIC, 0),),
+        "min_size": 4*8,
+        "description": "FLAC audio",
+    }
+    endian = BIG_ENDIAN
+
+    def validate(self):
+        if self.stream.readBytes(0, len(self.MAGIC)) != self.MAGIC:
+            return u"Invalid magic string"
+        return True
+
+    def createFields(self):
+        yield String(self, "signature", 4,charset="ASCII", description="FLAC signature: fLaC string")
+        yield Metadata(self,"metadata")
+        yield Frames(self,"frames")
+
diff --git a/lib/hachoir_parser/audio/id3.py b/lib/hachoir_parser/audio/id3.py
new file mode 100644
index 0000000000000000000000000000000000000000..3cfda25f9436dc76ca6ba5cc61c35210b4e88562
--- /dev/null
+++ b/lib/hachoir_parser/audio/id3.py
@@ -0,0 +1,507 @@
+"""
+ID3 metadata parser, supported versions: 1.0, 2.2, 2.3 and 2.4
+
+Information: http://www.id3.org/
+
+Author: Victor Stinner
+"""
+
+from hachoir_core.field import (FieldSet, MatchError, ParserError,
+    Enum, UInt8, UInt24, UInt32,
+    CString, String, RawBytes,
+    Bit, Bits, NullBytes, NullBits)
+from hachoir_core.text_handler import textHandler
+from hachoir_core.tools import humanDuration
+from hachoir_core.endian import NETWORK_ENDIAN
+
+class ID3v1(FieldSet):
+    static_size = 128 * 8
+    GENRE_NAME = {
+          0: u"Blues",
+          1: u"Classic Rock",
+          2: u"Country",
+          3: u"Dance",
+          4: u"Disco",
+          5: u"Funk",
+          6: u"Grunge",
+          7: u"Hip-Hop",
+          8: u"Jazz",
+          9: u"Metal",
+         10: u"New Age",
+         11: u"Oldies",
+         12: u"Other",
+         13: u"Pop",
+         14: u"R&B",
+         15: u"Rap",
+         16: u"Reggae",
+         17: u"Rock",
+         18: u"Techno",
+         19: u"Industrial",
+         20: u"Alternative",
+         21: u"Ska",
+         22: u"Death Metal",
+         23: u"Pranks",
+         24: u"Soundtrack",
+         25: u"Euro-Techno",
+         26: u"Ambient",
+         27: u"Trip-Hop",
+         28: u"Vocal",
+         29: u"Jazz+Funk",
+         30: u"Fusion",
+         31: u"Trance",
+         32: u"Classical",
+         33: u"Instrumental",
+         34: u"Acid",
+         35: u"House",
+         36: u"Game",
+         37: u"Sound Clip",
+         38: u"Gospel",
+         39: u"Noise",
+         40: u"AlternRock",
+         41: u"Bass",
+         42: u"Soul",
+         43: u"Punk",
+         44: u"Space",
+         45: u"Meditative",
+         46: u"Instrumental Pop",
+         47: u"Instrumental Rock",
+         48: u"Ethnic",
+         49: u"Gothic",
+         50: u"Darkwave",
+         51: u"Techno-Industrial",
+         52: u"Electronic",
+         53: u"Pop-Folk",
+         54: u"Eurodance",
+         55: u"Dream",
+         56: u"Southern Rock",
+         57: u"Comedy",
+         58: u"Cult",
+         59: u"Gangsta",
+         60: u"Top 40",
+         61: u"Christian Rap",
+         62: u"Pop/Funk",
+         63: u"Jungle",
+         64: u"Native American",
+         65: u"Cabaret",
+         66: u"New Wave",
+         67: u"Psychadelic",
+         68: u"Rave",
+         69: u"Showtunes",
+         70: u"Trailer",
+         71: u"Lo-Fi",
+         72: u"Tribal",
+         73: u"Acid Punk",
+         74: u"Acid Jazz",
+         75: u"Polka",
+         76: u"Retro",
+         77: u"Musical",
+         78: u"Rock & Roll",
+         79: u"Hard Rock",
+         # Following are winamp extensions
+         80: u"Folk",
+         81: u"Folk-Rock",
+         82: u"National Folk",
+         83: u"Swing",
+         84: u"Fast Fusion",
+         85: u"Bebob",
+         86: u"Latin",
+         87: u"Revival",
+         88: u"Celtic",
+         89: u"Bluegrass",
+         90: u"Avantgarde",
+         91: u"Gothic Rock",
+         92: u"Progressive Rock",
+         93: u"Psychedelic Rock",
+         94: u"Symphonic Rock",
+         95: u"Slow Rock",
+         96: u"Big Band",
+         97: u"Chorus",
+         98: u"Easy Listening",
+         99: u"Acoustic",
+        100: u"Humour",
+        101: u"Speech",
+        102: u"Chanson",
+        103: u"Opera",
+        104: u"Chamber Music",
+        105: u"Sonata",
+        106: u"Symphony",
+        107: u"Booty Bass",
+        108: u"Primus",
+        109: u"Porn Groove",
+        110: u"Satire",
+        111: u"Slow Jam",
+        112: u"Club",
+        113: u"Tango",
+        114: u"Samba",
+        115: u"Folklore",
+        116: u"Ballad",
+        117: u"Power Ballad",
+        118: u"Rhythmic Soul",
+        119: u"Freestyle",
+        120: u"Duet",
+        121: u"Punk Rock",
+        122: u"Drum Solo",
+        123: u"A capella",
+        124: u"Euro-House",
+        125: u"Dance Hall",
+        126: u"Goa",
+        127: u"Drum & Bass",
+        128: u"Club-House",
+        129: u"Hardcore",
+        130: u"Terror",
+        131: u"Indie",
+        132: u"Britpop",
+        133: u"Negerpunk",
+        134: u"Polsk Punk",
+        135: u"Beat",
+        136: u"Christian Gangsta Rap",
+        137: u"Heavy Metal",
+        138: u"Black Metal",
+        139: u"Crossover",
+        140: u"Contemporary Christian",
+        141: u"Christian Rock ",
+        142: u"Merengue",
+        143: u"Salsa",
+        144: u"Trash Metal",
+        145: u"Anime",
+        146: u"JPop",
+        147: u"Synthpop"
+    }
+
+    def createFields(self):
+        yield String(self, "signature", 3, "IDv1 signature (\"TAG\")", charset="ASCII")
+        if self["signature"].value != "TAG":
+            raise MatchError("Stream doesn't look like ID3v1 (wrong signature)!")
+        # TODO: Charset of below strings?
+        yield String(self, "song", 30, "Song title", strip=" \0", charset="ISO-8859-1")
+        yield String(self, "author", 30, "Author", strip=" \0", charset="ISO-8859-1")
+        yield String(self, "album", 30, "Album title", strip=" \0", charset="ISO-8859-1")
+        yield String(self, "year", 4, "Year", strip=" \0", charset="ISO-8859-1")
+
+        # TODO: Write better algorithm to guess ID3v1 version
+        version = self.getVersion()
+        if version in ("v1.1", "v1.1b"):
+            if version == "v1.1b":
+                # ID3 v1.1b
+                yield String(self, "comment", 29, "Comment", strip=" \0", charset="ISO-8859-1")
+                yield UInt8(self, "track_nb", "Track number")
+            else:
+                # ID3 v1.1
+                yield String(self, "comment", 30, "Comment", strip=" \0", charset="ISO-8859-1")
+            yield Enum(UInt8(self, "genre", "Genre"), self.GENRE_NAME)
+        else:
+            # ID3 v1.0
+            yield String(self, "comment", 31, "Comment", strip=" \0", charset="ISO-8859-1")
+
+    def getVersion(self):
+        addr = self.absolute_address + 126*8
+        bytes = self.stream.readBytes(addr, 2)
+
+        # last byte (127) is not space?
+        if bytes[1] != ' ':
+            # byte 126 is nul?
+            if bytes[0] == 0x00:
+                return "v1.1"
+            else:
+                return "v1.1b"
+        else:
+            return "1.0"
+
+    def createDescription(self):
+        version = self.getVersion()
+        return "ID3 %s: author=%s, song=%s" % (
+            version, self["author"].value, self["song"].value)
+
+def getCharset(field):
+    try:
+        key = field.value
+        return ID3_StringCharset.charset_name[key]
+    except KeyError:
+        raise ParserError("ID3v2: Invalid charset (%s)." % key)
+
+class ID3_String(FieldSet):
+    STRIP = " \0"
+    def createFields(self):
+        yield String(self, "text", self._size/8, "Text", charset="ISO-8859-1", strip=self.STRIP)
+
+class ID3_StringCharset(ID3_String):
+    STRIP = " \0"
+    charset_desc = {
+        0: "ISO-8859-1",
+        1: "UTF-16 with BOM",
+        2: "UTF-16 (big endian)",
+        3: "UTF-8"
+    }
+    charset_name = {
+        0: "ISO-8859-1",
+        1: "UTF-16",
+        2: "UTF-16-BE",
+        3: "UTF-8"
+    }
+    def createFields(self):
+        yield Enum(UInt8(self, "charset"), self.charset_desc)
+        size = (self.size - self.current_size)/8
+        if not size:
+            return
+        charset = getCharset(self["charset"])
+        yield String(self, "text", size, "Text", charset=charset, strip=self.STRIP)
+
+class ID3_GEOB(ID3_StringCharset):
+    def createFields(self):
+        yield Enum(UInt8(self, "charset"), self.charset_desc)
+        charset = getCharset(self["charset"])
+        yield CString(self, "mime", "MIME type", charset=charset)
+        yield CString(self, "filename", "File name", charset=charset)
+        yield CString(self, "description", "Content description", charset=charset)
+        size = (self.size - self.current_size) // 8
+        if not size:
+            return
+        yield String(self, "text", size, "Text", charset=charset)
+
+class ID3_Comment(ID3_StringCharset):
+    def createFields(self):
+        yield Enum(UInt8(self, "charset"), self.charset_desc)
+        yield String(self, "lang", 3, "Language", charset="ASCII")
+        charset = getCharset(self["charset"])
+        yield CString(self, "title", "Title", charset=charset, strip=self.STRIP)
+        size = (self.size - self.current_size) // 8
+        if not size:
+            return
+        yield String(self, "text", size, "Text", charset=charset, strip=self.STRIP)
+
+class ID3_StringTitle(ID3_StringCharset):
+    def createFields(self):
+        yield Enum(UInt8(self, "charset"), self.charset_desc)
+        if self.current_size == self.size:
+            return
+        charset = getCharset(self["charset"])
+        yield CString(self, "title", "Title", charset=charset, strip=self.STRIP)
+        size = (self.size - self.current_size)/8
+        if not size:
+            return
+        yield String(self, "text", size, "Text", charset=charset, strip=self.STRIP)
+
+class ID3_Private(FieldSet):
+    def createFields(self):
+        size = self._size/8
+        # TODO: Strings charset?
+        if self.stream.readBytes(self.absolute_address, 9) == "PeakValue":
+            yield String(self, "text", 9, "Text")
+            size -= 9
+        yield String(self, "content", size, "Content")
+
+class ID3_TrackLength(FieldSet):
+    def createFields(self):
+        yield NullBytes(self, "zero", 1)
+        yield textHandler(String(self, "length", self._size/8 - 1,
+            "Length in ms", charset="ASCII"), self.computeLength)
+
+    def computeLength(self, field):
+        try:
+            ms = int(field.value)
+            return humanDuration(ms)
+        except:
+            return field.value
+
+class ID3_Picture23(FieldSet):
+    pict_type_name = {
+        0x00: "Other",
+        0x01: "32x32 pixels 'file icon' (PNG only)",
+        0x02: "Other file icon",
+        0x03: "Cover (front)",
+        0x04: "Cover (back)",
+        0x05: "Leaflet page",
+        0x06: "Media (e.g. lable side of CD)",
+        0x07: "Lead artist/lead performer/soloist",
+        0x08: "Artist/performer",
+        0x09: "Conductor",
+        0x0A: "Band/Orchestra",
+        0x0B: "Composer",
+        0x0C: "Lyricist/text writer",
+        0x0D: "Recording Location",
+        0x0E: "During recording",
+        0x0F: "During performance",
+        0x10: "Movie/video screen capture",
+        0x11: "A bright coloured fish",
+        0x12: "Illustration",
+        0x13: "Band/artist logotype",
+        0x14: "Publisher/Studio logotype"
+    }
+    def createFields(self):
+        yield Enum(UInt8(self, "charset"), ID3_StringCharset.charset_desc)
+        charset = getCharset(self["charset"])
+        yield String(self, "img_fmt", 3, charset="ASCII")
+        yield Enum(UInt8(self, "pict_type"), self.pict_type_name)
+        yield CString(self, "text", "Text", charset=charset, strip=" \0")
+        size = (self._size - self._current_size) / 8
+        if size:
+            yield RawBytes(self, "img_data", size)
+
+class ID3_Picture24(FieldSet):
+    def createFields(self):
+        yield Enum(UInt8(self, "charset"), ID3_StringCharset.charset_desc)
+        charset = getCharset(self["charset"])
+        yield CString(self, "mime", "MIME type", charset=charset)
+        yield Enum(UInt8(self, "pict_type"), ID3_Picture23.pict_type_name)
+        yield CString(self, "description", charset=charset)
+        size = (self._size - self._current_size) / 8
+        if size:
+            yield RawBytes(self, "img_data", size)
+
class ID3_Chunk(FieldSet):
    """
    One ID3 tag frame: tag identifier, size, flags (v2.3+), and a content
    payload parsed by a type-specific handler when one is known.

    v2.2 frames use a 3-character tag and a 6-byte header; v2.3/v2.4
    frames use a 4-character tag and a 10-byte header.
    """
    endian = NETWORK_ENDIAN
    # Frame identifiers for ID3 v2.2 (3-character tags).
    tag22_name = {
        "TT2": "Track title",
        "TP1": "Artist",
        "TRK": "Track number",
        "COM": "Comment",
        "TCM": "Composer",
        "TAL": "Album",
        "TYE": "Year",
        "TEN": "Encoder",
        "TCO": "Content type",
        "PIC": "Picture"
    }
    # Frame identifiers for ID3 v2.3/v2.4 (4-character tags).
    tag23_name = {
        "COMM": "Comment",
        "GEOB": "Encapsulated object",
        "PRIV": "Private",
        "TPE1": "Artist",
        "TCOP": "Copyright",
        "TALB": "Album",
        "TENC": "Encoder",
        "TYER": "Year",
        "TSSE": "Encoder settings",
        "TCOM": "Composer",
        "TRCK": "Track number",
        "PCNT": "Play counter",
        "TCON": "Content type",
        "TLEN": "Track length",
        "TIT2": "Track title",
        "WXXX": "User defined URL"
    }
    # Content parsers per tag; frames without an entry fall back to
    # ID3_StringCharset (for "T..." tags) or raw bytes.
    handler = {
        "COMM": ID3_Comment,
        "COM": ID3_Comment,
        "GEOB": ID3_GEOB,
        "PIC": ID3_Picture23,
        "APIC": ID3_Picture24,
        "PRIV": ID3_Private,
        "TXXX": ID3_StringTitle,
        "WOAR": ID3_String,
        "WXXX": ID3_StringTitle,
    }

    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        # Frame size = header (10 bytes in v2.3+, 6 bytes in v2.2) plus
        # the payload size stored in the "size" field.
        if 3 <= self["../ver_major"].value:
            self._size = (10 + self["size"].value) * 8
        else:
            self._size = (self["size"].value + 6) * 8

    def createFields(self):
        if 3 <= self["../ver_major"].value:
            # ID3 v2.3 and 2.4
            yield Enum(String(self, "tag", 4, "Tag", charset="ASCII", strip="\0"), ID3_Chunk.tag23_name)
            if 4 <= self["../ver_major"].value:
                yield ID3_Size(self, "size")   # ID3 v2.4 (synchsafe integer)
            else:
                yield UInt32(self, "size")   # ID3 v2.3

            # Frame status flags (first flag byte).
            yield Bit(self, "tag_alter", "Tag alter preservation")
            # BUGFIX: description was a copy/paste of the previous field
            # ("Tag alter preservation").
            yield Bit(self, "file_alter", "File alter preservation")
            yield Bit(self, "rd_only", "Read only?")
            yield NullBits(self, "padding[]", 5)

            # Frame format flags (second flag byte).
            yield Bit(self, "compressed", "Frame is compressed?")
            yield Bit(self, "encrypted", "Frame is encrypted?")
            yield Bit(self, "group", "Grouping identity")
            yield NullBits(self, "padding[]", 5)
            size = self["size"].value
            is_compressed = self["compressed"].value
        else:
            # ID3 v2.2
            yield Enum(String(self, "tag", 3, "Tag", charset="ASCII", strip="\0"), ID3_Chunk.tag22_name)
            yield UInt24(self, "size")
            # Remaining payload: stored size minus what the header already
            # consumed, plus the 6-byte header included in self._size.
            size = self["size"].value - self.current_size/8 + 6
            is_compressed = False

        if size:
            cls = None
            # Compressed payloads can't be parsed field by field.
            if not is_compressed:
                tag = self["tag"].value
                if tag in ID3_Chunk.handler:
                    cls = ID3_Chunk.handler[tag]
                elif tag[0] == "T":
                    # Any other text frame ("T...") starts with a charset byte.
                    cls = ID3_StringCharset
            if cls:
                yield cls(self, "content", "Content", size=size*8)
            else:
                yield RawBytes(self, "content", size, "Raw data content")

    def createDescription(self):
        if self["size"].value != 0:
            return "ID3 Chunk: %s" % self["tag"].display
        else:
            # Zero-size frame marks the end of the tag list.
            return "ID3 Chunk: (terminator)"
+
class ID3_Size(Bits):
    """ID3 v2.4 "synchsafe" 32-bit size: four bytes carrying 7 value bits
    each (the most significant bit of every byte is supposed to be 0)."""
    static_size = 32

    def __init__(self, parent, name, description=None):
        Bits.__init__(self, parent, name, 32, description)

    def createValue(self):
        raw = self.parent.stream.readBytes(self.absolute_address, 4)
        # TODO: validate that bit #7 of each byte is clear:
        # not(ord(byte) & 128)
        value = 0
        for byte in raw:
            value = (value << 7) + ord(byte)
        return value
+
class ID3v2(FieldSet):
    """Whole ID3 v2.x tag: signature, version, flags, size, then the
    frame (chunk) list up to the declared tag size."""
    endian = NETWORK_ENDIAN
    # Supported "2.x" major versions.
    VALID_MAJOR_VERSIONS = (2, 3, 4)

    def __init__(self, parent, name, size=None):
        FieldSet.__init__(self, parent, name, size=size)
        if not self._size:
            # Declared tag size excludes the 10-byte header.
            self._size = (self["size"].value + 10) * 8

    def createDescription(self):
        return "ID3 v2.%s.%s" % \
            (self["ver_major"].value, self["ver_minor"].value)

    def createFields(self):
        # Signature + version
        yield String(self, "header", 3, "Header (ID3)", charset="ASCII")
        yield UInt8(self, "ver_major", "Version (major)")
        yield UInt8(self, "ver_minor", "Version (minor)")

        # Check format
        if self["header"].value != "ID3":
            raise MatchError("Signature error, should be \"ID3\".")
        if self["ver_major"].value not in self.VALID_MAJOR_VERSIONS \
        or self["ver_minor"].value != 0:
            raise MatchError(
                "Unknown ID3 metadata version (2.%u.%u)"
                % (self["ver_major"].value, self["ver_minor"].value))

        # Flags
        yield Bit(self, "unsync", "Unsynchronisation is used?")
        yield Bit(self, "ext", "Extended header is used?")
        yield Bit(self, "exp", "Experimental indicator")
        yield NullBits(self, "padding[]", 5)

        # Size
        yield ID3_Size(self, "size")

        # All tags; a zero-size chunk is the terminator.
        while self.current_size < self._size:
            field = ID3_Chunk(self, "field[]")
            yield field
            if field["size"].value == 0:
                break

        # Search first byte of the MPEG file
        padding = self.seekBit(self._size)
        if padding:
            yield padding
+
diff --git a/lib/hachoir_parser/audio/itunesdb.py b/lib/hachoir_parser/audio/itunesdb.py
new file mode 100644
index 0000000000000000000000000000000000000000..3472d2d85738a2221097d54db88443b14b301bb1
--- /dev/null
+++ b/lib/hachoir_parser/audio/itunesdb.py
@@ -0,0 +1,433 @@
+"""
+iPod iTunesDB parser.
+
+Documentation:
+- http://ipodlinux.org/ITunesDB
+
+Author: Romain HERAULT
+Creation date: 19 august 2006
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    UInt8, UInt16, UInt32, UInt64, TimestampMac32,
+    String, Float32, NullBytes, Enum)
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.tools import humanDuration
+from hachoir_core.text_handler import displayHandler, filesizeHandler
+
# Sort-order codes used by iTunesDB playlist structures.
# NOTE(review): apparently unused within this module — Playlist defines
# its own list_sort_order_name with the same codes; verify before removing.
list_order={
        1 : "playlist order (manual sort order)",
        2 : "???",
        3 : "songtitle",
        4 : "album",
        5 : "artist",
        6 : "bitrate",
        7 : "genre",
        8 : "kind",
        9 : "date modified",
        10 : "track number",
        11 : "size",
        12 : "time",
        13 : "year",
        14 : "sample rate",
        15 : "comment",
        16 : "date added",
        17 : "equalizer",
        18 : "composer",
        19 : "???",
        20 : "play count",
        21 : "last played",
        22 : "disc number",
        23 : "my rating",
        24 : "release date",
        25 : "BPM",
        26 : "grouping",
        27 : "category",
        28 : "description",
        29 : "show",
        30 : "season",
        31 : "episode number"
    }
+
class DataObject(FieldSet):
    """iTunesDB "mhod" data object: a typed payload (string, URL, smart
    playlist data, sort index, ...) attached to a track or playlist."""
    # Known mhod payload types.
    type_name={
        1:"Title",
        2:"Location",
        3:"Album",
        4:"Artist",
        5:"Genre",
        6:"Filetype",
        7:"EQ Setting",
        8:"Comment",
        9:"Category",
        12:"Composer",
        13:"Grouping",
        14:"Description text",
        15:"Podcast Enclosure URL",
        16:"Podcast RSS URL",
        17:"Chapter data",
        18:"Subtitle",
        19:"Show (for TV Shows only)",
        20:"Episode",
        21:"TV Network",
        50:"Smart Playlist Data",
        51:"Smart Playlist Rules",
        52:"Library Playlist Index",
        100:"Column info",
    }

    # Sort criteria for type-52 (library playlist index) objects.
    mhod52_sort_index_type_name={
        3:"Title",
        4:"Album, then Disk/Tracknumber, then Title",
        5:"Artist, then Album, then Disc/Tracknumber, then Title",
        7:"Genre, then Artist, then Album, then Disc/Tracknumber, then Title",
        8:"Composer, then Title"
    }

    def __init__(self, *args, **kw):
        FieldSet.__init__(self, *args, **kw)
        # Total object size in bits, as declared by the entry header.
        self._size = self["entry_length"].value *8

    def createFields(self):
        yield String(self, "header_id", 4, "Data Object Header Markup (\"mhod\")", charset="ISO-8859-1")
        yield UInt32(self, "header_length", "Header Length")
        yield UInt32(self, "entry_length", "Entry Length")
        yield Enum(UInt32(self, "type", "type"),self.type_name)
        # Types 1..14: UTF-16-LE string payload with explicit byte length.
        if(self["type"].value<15):
            yield UInt32(self, "unknown[]")
            yield UInt32(self, "unknown[]")
            yield UInt32(self, "position", "Position")
            yield UInt32(self, "length", "String Length in bytes")
            yield UInt32(self, "unknown[]")
            yield UInt32(self, "unknown[]")
            yield String(self, "string", self["length"].value, "String Data", charset="UTF-16-LE")
        # Types 15..16 (podcast URLs): UTF-8 string filling the object.
        elif (self["type"].value<17):
            yield UInt32(self, "unknown[]")
            yield UInt32(self, "unknown[]")
            yield String(self, "string", self._size/8-self["header_length"].value, "String Data", charset="UTF-8")
        # Type 52: library playlist sort index (array of mhit indexes).
        elif (self["type"].value == 52):
            yield UInt32(self, "unknown[]", "unk1")
            yield UInt32(self, "unknown[]", "unk2")
            yield Enum(UInt32(self, "sort_index_type", "Sort Index Type"),self.mhod52_sort_index_type_name)
            yield UInt32(self, "entry_count", "Entry Count")
            # The index array sits at the end of the entry; skip ahead.
            indexes_size = self["entry_count"].value*4
            padding_offset = self["entry_length"].value - indexes_size
            padding = self.seekByte(padding_offset, "header padding")
            if padding:
                yield padding
            for i in xrange(self["entry_count"].value):
                yield UInt32(self, "index["+str(i)+"]", "Index of the "+str(i)+"nth mhit")
        else:
            # Unknown/opaque type: skip the header, keep the rest raw.
            padding = self.seekByte(self["header_length"].value, "header padding")
            if padding:
                yield padding
        padding = self.seekBit(self._size, "entry padding")
        if padding:
            yield padding
+
class TrackItem(FieldSet):
    """iTunesDB "mhit" track record: fixed binary layout of track
    metadata, followed by its string data objects ("mhod")."""
    x1_type_name={
        0:"AAC or CBR MP3",
        1:"VBR MP3"
    }
    x2_type_name={
        0:"AAC",
        1:"MP3"
    }
    media_type_name={
        0x00:"Audio/Video",
        0x01:"Audio",
        0x02:"Video",
        0x04:"Podcast",
        0x06:"Video Podcast",
        0x08:"Audiobook",
        0x20:"Music Video",
        0x40:"TV Show",
        0X60:"TV Show (Music lists)",
    }
    def __init__(self, *args, **kw):
        FieldSet.__init__(self, *args, **kw)
        # Total record size in bits, as declared by the entry header.
        self._size = self["entry_length"].value *8

    def createFields(self):
        # Field order below IS the on-disk layout — do not reorder.
        yield String(self, "header_id", 4, "Track Item Header Markup (\"mhit\")", charset="ISO-8859-1")
        yield UInt32(self, "header_length", "Header Length")
        yield UInt32(self, "entry_length", "Entry Length")
        yield UInt32(self, "string_number", "Number of Strings")
        yield UInt32(self, "unique_id", "Unique ID")
        yield UInt32(self, "visible_tag", "Visible Tag")
        yield String(self, "file_type", 4, "File Type")
        yield Enum(UInt8(self, "x1_type", "Extended Type 1"),self.x1_type_name)
        yield Enum(UInt8(self, "x2_type", "Extended type 2"),self.x2_type_name)
        yield UInt8(self, "compilation_flag", "Compilation Flag")
        yield UInt8(self, "rating", "Rating")
        yield TimestampMac32(self, "added_date", "Date when the item was added")
        yield filesizeHandler(UInt32(self, "size", "Track size in bytes"))
        yield displayHandler(UInt32(self, "length", "Track length in milliseconds"), humanDuration)
        yield UInt32(self, "track_number", "Number of this track")
        yield UInt32(self, "total_track", "Total number of tracks")
        yield UInt32(self, "year", "Year of the track")
        yield UInt32(self, "bitrate", "Bitrate")
        yield UInt32(self, "samplerate", "Sample Rate")
        yield UInt32(self, "volume", "volume")
        yield UInt32(self, "start_time", "Start playing at, in milliseconds")
        yield UInt32(self, "stop_time", "Stop playing at,  in milliseconds")
        yield UInt32(self, "soundcheck", "SoundCheck preamp")
        yield UInt32(self, "playcount_1", "Play count of the track")
        yield UInt32(self, "playcount_2", "Play count of the track (identical to playcount_1)")
        yield UInt32(self, "last_played_time", "Time the song was last played")
        yield UInt32(self, "disc_number", "disc number in multi disc sets")
        yield UInt32(self, "total_discs", "Total number of discs in the disc set")
        yield UInt32(self, "userid", "User ID in the DRM scheme")
        yield TimestampMac32(self, "last_modified", "Time of the last modification of the track")
        yield UInt32(self, "bookmark_time", "Bookmark time for AudioBook")
        yield UInt64(self, "dbid", "Unique DataBase ID for the song (identical in mhit and in mhii)")
        yield UInt8(self, "checked", "song is checked")
        yield UInt8(self, "application_rating", "Last Rating before change")
        yield UInt16(self, "BPM", "BPM of the track")
        yield UInt16(self, "artwork_count", "number of artworks fo this item")
        yield UInt16(self, "unknown[]")
        yield UInt32(self, "artwork_size", "Total size of artworks in bytes")
        yield UInt32(self, "unknown[]")
        yield Float32(self, "sample_rate_2", "Sample Rate express in float")
        yield UInt32(self, "released_date", "Date of release in Music Store or in Podcast")
        yield UInt32(self, "unknown[]")
        yield UInt32(self, "unknown[]")
        yield UInt32(self, "unknown[]")
        yield UInt32(self, "unknown[]")
        yield UInt32(self, "unknown[]")
        yield UInt8(self, "has_artwork", "0x01 for track with artwork, 0x02 otherwise")
        yield UInt8(self, "skip_wen_shuffling", "Skip that track when shuffling")
        yield UInt8(self, "remember_playback_position", "Remember playback position")
        yield UInt8(self, "flag4", "Flag 4")
        yield UInt64(self, "dbid2", "Unique DataBase ID for the song (identical as above)")
        yield UInt8(self, "lyrics_flag", "Lyrics Flag")
        yield UInt8(self, "movie_file_flag", "Movie File Flag")
        yield UInt8(self, "played_mark", "Track has been played")
        yield UInt8(self, "unknown[]")
        yield UInt32(self, "unknown[]")
        yield UInt32(self, "unknown[]")
        yield UInt32(self, "sample_count", "Number of samples in the song (only for WAV and AAC files)")
        yield UInt32(self, "unknown[]")
        yield UInt32(self, "unknown[]")
        yield UInt32(self, "unknown[]")
        yield UInt32(self, "unknown[]")
        yield Enum(UInt32(self, "media_type", "Media Type for video iPod"),self.media_type_name)
        yield UInt32(self, "season_number", "Season Number")
        yield UInt32(self, "episode_number", "Episode Number")
        yield UInt32(self, "unknown[]")
        yield UInt32(self, "unknown[]")
        yield UInt32(self, "unknown[]")
        yield UInt32(self, "unknown[]")
        yield UInt32(self, "unknown[]")
        yield UInt32(self, "unknown[]")
        # Skip any unparsed bytes remaining in the fixed header.
        padding = self.seekByte(self["header_length"].value, "header padding")
        if padding:
            yield padding

        # string_number child data objects follow the fixed header.
        #while ((self.stream.readBytes(0, 4) == 'mhod') and  ((self.current_size/8) < self["entry_length"].value)):
        for i in xrange(self["string_number"].value):
            yield DataObject(self, "data[]")
        padding = self.seekBit(self._size, "entry padding")
        if padding:
            yield padding
+
class TrackList(FieldSet):
    """iTunesDB "mhlt" container: header followed by one TrackItem per
    declared track."""
    def createFields(self):
        yield String(self, "header_id", 4, "Track List Header Markup (\"mhlt\")", charset="ISO-8859-1")
        yield UInt32(self, "header_length", "Header Length")
        yield UInt32(self, "track_number", "Number of Tracks")

        # Skip any unparsed bytes remaining in the header.
        padding = self.seekByte(self["header_length"].value, "header padding")
        if padding:
            yield padding

        for i in xrange(self["track_number"].value):
            yield TrackItem(self, "track[]")
+
class PlaylistItem(FieldSet):
    """iTunesDB "mhip" entry: one track reference inside a playlist,
    followed by its child data objects ("mhod")."""
    def __init__(self, *args, **kw):
        FieldSet.__init__(self, *args, **kw)
        # Total entry size in bits, as declared by the entry header.
        self._size = self["entry_length"].value *8

    def createFields(self):
        yield String(self, "header_id", 4, "Playlist Item Header Markup (\"mhip\")", charset="ISO-8859-1")
        yield UInt32(self, "header_length", "Header Length")
        yield UInt32(self, "entry_length", "Entry Length")
        yield UInt32(self, "data_object_child_count", "Number of Child Data Objects")
        yield UInt32(self, "podcast_grouping_flag", "Podcast Grouping Flag")
        yield UInt32(self, "group_id", "Group ID")
        yield UInt32(self, "track_id", "Track ID")
        yield TimestampMac32(self, "timestamp", "Song Timestamp")
        yield UInt32(self, "podcast_grouping_ref", "Podcast Grouping Reference")
        # Skip any unparsed bytes remaining in the header.
        padding = self.seekByte(self["header_length"].value, "header padding")
        if padding:
            yield padding

        for i in xrange(self["data_object_child_count"].value):
            yield DataObject(self, "mhod[]")
+
+
class Playlist(FieldSet):
    """iTunesDB "mhyp" playlist: header, child data objects ("mhod"),
    then the playlist items ("mhip")."""
    is_master_pl_name={
        0:"Regular playlist",
        1:"Master playlist"
    }

    is_podcast_name={
        0:"Normal Playlist List",
        1:"Podcast Playlist List"
    }

    # Same sort codes as the module-level list_order table (different
    # capitalization of the labels).
    list_sort_order_name={
        1:"Manual Sort Order",
        2:"???",
        3:"Song Title",
        4:"Album",
        5:"Artist",
        6:"Bitrate",
        7:"Genre",
        8:"Kind",
        9:"Date Modified",
        10:"Track Number",
        11:"Size",
        12:"Time",
        13:"Year",
        14:"Sample Rate",
        15:"Comment",
        16:"Date Added",
        17:"Equalizer",
        18:"Composer",
        19:"???",
        20:"Play Count",
        21:"Last Played",
        22:"Disc Number",
        23:"My Rating",
        24:"Release Date",
        25:"BPM",
        26:"Grouping",
        27:"Category",
        28:"Description",
        29:"Show",
        30:"Season",
        31:"Episode Number"
    }

    def __init__(self, *args, **kw):
        FieldSet.__init__(self, *args, **kw)
        # Total playlist size in bits, as declared by the entry header.
        self._size = self["entry_length"].value *8

    def createFields(self):
        yield String(self, "header_id", 4, "Playlist List Header Markup (\"mhyp\")", charset="ISO-8859-1")
        yield UInt32(self, "header_length", "Header Length")
        yield UInt32(self, "entry_length", "Entry Length")
        yield UInt32(self, "data_object_child_count", "Number of Child Data Objects")
        yield UInt32(self, "playlist_count", "Number of Playlist Items")
        yield Enum(UInt8(self, "type", "Normal or master playlist?"), self.is_master_pl_name)
        yield UInt8(self, "XXX1", "XXX1")
        yield UInt8(self, "XXX2", "XXX2")
        yield UInt8(self, "XXX3", "XXX3")
        yield TimestampMac32(self, "creation_date", "Date when the playlist was created")
        yield UInt64(self, "playlistid", "Persistent Playlist ID")
        yield UInt32(self, "unk3", "unk3")
        yield UInt16(self, "string_mhod_count", "Number of string MHODs for this playlist")
        yield Enum(UInt16(self, "is_podcast", "Playlist or Podcast List?"), self.is_podcast_name)
        yield Enum(UInt32(self, "sort_order", "Playlist Sort Order"), self.list_sort_order_name)

        # Skip any unparsed bytes remaining in the header.
        padding = self.seekByte(self["header_length"].value, "entry padding")
        if padding:
            yield padding

        for i in xrange(self["data_object_child_count"].value):
            yield DataObject(self, "mhod[]")

        for i in xrange(self["playlist_count"].value):
            yield PlaylistItem(self, "playlist_item[]")
+
+
+
class PlaylistList(FieldSet):
    """iTunesDB "mhlp" container: header followed by one Playlist per
    declared entry."""
    def createFields(self):
        yield String(self, "header_id", 4, "Playlist List Header Markup (\"mhlp\")", charset="ISO-8859-1")
        yield UInt32(self, "header_length", "Header Length")
        yield UInt32(self, "playlist_number", "Number of Playlists")

        # Skip any unparsed bytes remaining in the header.
        padding = self.seekByte(self["header_length"].value, "header padding")
        if padding:
            yield padding

        for i in xrange(self["playlist_number"].value):
            yield Playlist(self, "playlist[]")
+
class DataSet(FieldSet):
    """iTunesDB "mhsd" container: holds either a track list, a playlist
    list or a podcast list, selected by the "type" field."""
    type_name={
        1:"Track List",
        2:"Play List",
        3:"Podcast List"
        }
    def __init__(self, *args, **kw):
        FieldSet.__init__(self, *args, **kw)
        # Total data set size in bits, as declared by the entry header.
        self._size = self["entry_length"].value *8

    def createFields(self):
        yield String(self, "header_id", 4, "DataSet Header Markup (\"mhsd\")", charset="ISO-8859-1")
        yield UInt32(self, "header_length", "Header Length")
        yield UInt32(self, "entry_length", "Entry Length")
        yield Enum(UInt32(self, "type", "type"),self.type_name)
        padding = self.seekByte(self["header_length"].value, "header_raw")
        if padding:
            yield padding
        # The type values are mutually exclusive: use a single elif chain
        # (also dropped the stray C-style semicolons of the original code).
        if self["type"].value == 1:
            yield TrackList(self, "tracklist[]")
        elif self["type"].value == 2:
            yield PlaylistList(self, "playlist_list[]")
        elif self["type"].value == 3:
            yield PlaylistList(self, "podcast_list[]")
        padding = self.seekBit(self._size, "entry padding")
        if padding:
            yield padding
+
class DataBase(FieldSet):
    # NOTE(review): incomplete and apparently unused — createFields is
    # commented out below, and __init__ reads the "entry_length" field
    # that nothing would ever create. ITunesDBFile parses the "mhbd"
    # header itself. Finish or remove before relying on this class.
    def __init__(self, *args, **kw):
        FieldSet.__init__(self, *args, **kw)
        self._size = self["entry_length"].value *8

#    def createFields(self):
+
class ITunesDBFile(Parser):
    """Parser for the iPod iTunesDB database file ("mhbd" container)."""
    PARSER_TAGS = {
        "id": "itunesdb",
        "category": "audio",
        "min_size": 44*8,
        "magic": (('mhbd',0),),
        "description": "iPod iTunesDB file"
    }

    endian = LITTLE_ENDIAN

    def validate(self):
        # A valid database starts with the "mhbd" magic.
        return self.stream.readBytes(0, 4) == 'mhbd'

    def createFields(self):
        yield String(self, "header_id", 4, "DataBase Header Markup (\"mhbd\")", charset="ISO-8859-1")
        yield UInt32(self, "header_length", "Header Length")
        yield UInt32(self, "entry_length", "Entry Length")
        yield UInt32(self, "unknown[]")
        yield UInt32(self, "version_number", "Version Number")
        yield UInt32(self, "child_number", "Number of Children")
        yield UInt64(self, "id", "ID for this database")
        yield UInt32(self, "unknown[]")
        yield UInt64(self, "initial_dbid", "Initial DBID")
        # Pad up to header_length bytes before the child data sets.
        size = self["header_length"].value-self.current_size/ 8
        if size>0:
            yield NullBytes(self, "padding", size)
        for i in xrange(self["child_number"].value):
            yield DataSet(self, "dataset[]")
        padding = self.seekByte(self["entry_length"].value, "entry padding")
        if padding:
            yield padding

    def createContentSize(self):
        # entry_length is taken as the size of the whole database, in
        # bytes; hachoir expects bits. TODO confirm against the format doc.
        return self["entry_length"].value * 8
+
diff --git a/lib/hachoir_parser/audio/midi.py b/lib/hachoir_parser/audio/midi.py
new file mode 100644
index 0000000000000000000000000000000000000000..01da22893f39ddcfe3dce8a95cecd422907aa7b5
--- /dev/null
+++ b/lib/hachoir_parser/audio/midi.py
@@ -0,0 +1,216 @@
+"""
+Musical Instrument Digital Interface (MIDI) audio file parser.
+
+Documentation:
+ - Standard MIDI File Format, Dustin Caldwell (downloaded on wotsit.org)
+
+Author: Victor Stinner
+Creation: 27 december 2006
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, Bits, ParserError,
+    String, UInt32, UInt24, UInt16, UInt8, Enum, RawBytes)
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal
+from hachoir_core.tools import createDict, humanDurationNanosec
+from hachoir_parser.common.tracker import NOTE_NAME
+
# Upper bound (10 MiB) for the "end of track" search in createContentSize.
MAX_FILESIZE = 10 * 1024 * 1024
+
class Integer(Bits):
    """MIDI variable-length integer: 7 value bits per byte; bit 7 of each
    byte flags that another byte follows. The value is decoded eagerly at
    construction time, growing self._size by 8 bits per extra byte."""
    def __init__(self, parent, name, description=None):
        Bits.__init__(self, parent, name, 8, description)
        stream = parent.stream
        addr = self.absolute_address
        value = 0
        while True:
            bits = stream.readBits(addr, 8, parent.endian)
            # Low 7 bits carry data; bit 7 means "continuation".
            value = (value << 7) + (bits & 127)
            if not(bits & 128):
                break
            addr += 8
            self._size += 8
            # MIDI delta times are at most 4 bytes long.
            if 32 < self._size:
                raise ParserError("Integer size is bigger than 32-bit")
        # The decoded value is captured in a closure; createValue simply
        # returns it.
        self.createValue = lambda: value
+
def parseNote(parser):
    """Parse a note on/off or key after-touch event body."""
    yield Enum(UInt8(parser, "note", "Note number"), NOTE_NAME)
    yield UInt8(parser, "velocity")
+
def parseControl(parser):
    """Parse a control change event body."""
    yield UInt8(parser, "control", "Controller number")
    yield UInt8(parser, "value", "New value")
+
def parsePatch(parser):
    """Parse a program (patch) change event body."""
    yield UInt8(parser, "program", "New program number")
+
def parseChannel(parser):
    """Parse a channel after-touch event body."""
    yield UInt8(parser, "channel", "Channel number")
+
def parsePitch(parser):
    """Parse a pitch wheel change event body (14-bit value split over
    two 7-bit bytes)."""
    yield UInt8(parser, "bottom", "(least sig) 7 bits of value")
    yield UInt8(parser, "top", "(most sig) 7 bits of value")
+
def parseText(parser, size):
    """Parse a text meta event body of *size* bytes."""
    yield String(parser, "text", size)
+
def formatTempo(field):
    """Display a tempo field (microseconds) as a human readable duration."""
    # Field value is in microseconds; the helper expects nanoseconds.
    return humanDurationNanosec(field.value*1000)
+
def parseTempo(parser, size):
    """Parse a "set tempo" meta event body (size is unused: the payload
    is a fixed 24-bit value)."""
    yield textHandler(UInt24(parser, "microsec_quarter", "Microseconds per quarter note"), formatTempo)
+
def parseTimeSignature(parser, size):
    """Parse a time signature meta event body (four fixed bytes)."""
    yield UInt8(parser, "numerator", "Numerator of time signature")
    yield UInt8(parser, "denominator", "denominator of time signature 2=quarter 3=eighth, etc.")
    yield UInt8(parser, "nb_tick", "Number of ticks in metronome click")
    yield UInt8(parser, "nb_32nd_note", "Number of 32nd notes to the quarter note")
+
class Command(FieldSet):
    """
    One MIDI event: variable-length delta time, a status byte, then the
    event body — a channel message, or a meta event when status is 0xFF.
    NOTE(review): "running status" (omitted repeated status bytes) does
    not appear to be handled here — confirm before parsing such files.
    """
    # Channel voice messages: 16 entries per command, one per channel,
    # each mapped to (description, body parser).
    COMMAND = {}
    for channel in xrange(16):
        COMMAND[0x80+channel] = ("Note off (channel %u)" % channel, parseNote)
        COMMAND[0x90+channel] = ("Note on (channel %u)" % channel, parseNote)
        COMMAND[0xA0+channel] = ("Key after-touch (channel %u)" % channel, parseNote)
        COMMAND[0xB0+channel] = ("Control change (channel %u)" % channel, parseControl)
        COMMAND[0xC0+channel] = ("Program (patch) change (channel %u)" % channel, parsePatch)
        COMMAND[0xD0+channel] = ("Channel after-touch (channel %u)" % channel, parseChannel)
        COMMAND[0xE0+channel] = ("Pitch wheel change (channel %u)" % channel, parsePitch)
    # Projections of the (description, parser) pairs.
    COMMAND_DESC = createDict(COMMAND, 0)
    COMMAND_PARSER = createDict(COMMAND, 1)

    META_COMMAND_TEXT = 1
    META_COMMAND_NAME = 3
    # Meta events (status 0xFF): sub-command -> (description, parser).
    META_COMMAND = {
        0x00: ("Sets the track's sequence number", None),
        0x01: ("Text event", parseText),
        0x02: ("Copyright info", parseText),
        0x03: ("Sequence or Track name", parseText),
        0x04: ("Track instrument name", parseText),
        0x05: ("Lyric", parseText),
        0x06: ("Marker", parseText),
        0x07: ("Cue point", parseText),
        0x2F: ("End of the track", None),
        0x51: ("Set tempo", parseTempo),
        0x58: ("Time Signature", parseTimeSignature),
        0x59: ("Key signature", None),
        0x7F: ("Sequencer specific information", None),
    }
    META_COMMAND_DESC = createDict(META_COMMAND, 0)
    META_COMMAND_PARSER = createDict(META_COMMAND, 1)

    def createFields(self):
        yield Integer(self, "time", "Delta time in ticks")
        yield Enum(textHandler(UInt8(self, "command"), hexadecimal), self.COMMAND_DESC)
        command = self["command"].value
        if command == 0xFF:
            # Meta event: sub-command byte, length byte, then payload.
            yield Enum(textHandler(UInt8(self, "meta_command"), hexadecimal), self.META_COMMAND_DESC)
            yield UInt8(self, "data_len")
            size = self["data_len"].value
            if size:
                command = self["meta_command"].value
                if command in self.META_COMMAND_PARSER:
                    parser = self.META_COMMAND_PARSER[command]
                else:
                    parser = None
                if parser:
                    for field in parser(self, size):
                        yield field
                else:
                    # No dedicated parser: keep the payload as raw bytes.
                    yield RawBytes(self, "data", size)
        else:
            if command not in self.COMMAND_PARSER:
                raise ParserError("Unknown command: %s" % self["command"].display)
            parser = self.COMMAND_PARSER[command]
            for field in parser(self):
                yield field

    def createDescription(self):
        # Prefer the meta sub-command name when the event has one.
        if "meta_command" in self:
            return self["meta_command"].display
        else:
            return self["command"].display
+
class Track(FieldSet):
    """One MIDI track ("MTrk" chunk): marker, payload size, then a stream
    of timed commands up to the end of the chunk."""
    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        # The size field excludes the 8-byte marker+size header.
        self._size = (8 + self["size"].value) * 8

    def createFields(self):
        yield String(self, "marker", 4, "Track marker (MTrk)", charset="ASCII")
        yield UInt32(self, "size")
        # (Removed dead "if True:" debug scaffolding: the raw-bytes branch
        # was unreachable.)
        while not self.eof:
            yield Command(self, "command[]")

    def createDescription(self):
        # Use the first command's text as the track description when it is
        # a "Text event" or "Sequence or Track name" meta event.
        command = self["command[0]"]
        if "meta_command" in command \
        and command["meta_command"].value in (Command.META_COMMAND_TEXT, Command.META_COMMAND_NAME) \
        and "text" in command:
            return command["text"].value.strip("\r\n")
        else:
            return ""
+
class Header(FieldSet):
    """MIDI file header ("MThd") body: chunk size, file format, track
    count and time division."""
    static_size = 10*8
    FILE_FORMAT = {
        0: "Single track",
        1: "Multiple tracks, synchronous",
        2: "Multiple tracks, asynchronous",
    }

    def createFields(self):
        yield UInt32(self, "size")
        yield Enum(UInt16(self, "file_format"), self.FILE_FORMAT)
        yield UInt16(self, "nb_track")
        yield UInt16(self, "delta_time", "Delta-time ticks per quarter note")

    def createDescription(self):
        return "%s; %s tracks" % (
            self["file_format"].display, self["nb_track"].value)
+
class MidiFile(Parser):
    """MIDI audio file parser: an "MThd" header chunk followed by one or
    more "MTrk" track chunks."""
    MAGIC = "MThd"
    PARSER_TAGS = {
        "id": "midi",
        "category": "audio",
        "file_ext": ["mid", "midi"],
        # BUGFIX: was u"audio/mime", which is not a MIME type.
        "mime": (u"audio/midi", ),
        "magic": ((MAGIC, 0),),
        "min_size": 64,
        "description": "MIDI audio"
    }
    endian = BIG_ENDIAN

    def validate(self):
        """Accept only files with the MThd magic and the fixed header
        chunk size (6 bytes)."""
        if self.stream.readBytes(0, 4) != self.MAGIC:
            return "Invalid signature"
        if self["header/size"].value != 6:
            return "Invalid header size"
        return True

    def createFields(self):
        yield String(self, "signature", 4, r"MIDI signature (MThd)", charset="ASCII")
        yield Header(self, "header")
        while not self.eof:
            yield Track(self, "track[]")

    def createDescription(self):
        return "MIDI audio: %s" % self["header"].description

    def createContentSize(self):
        # Find the "End of track" meta event (FF 2F 00) of the last
        # declared track; content ends just after it (result in bits).
        count = self["/header/nb_track"].value - 1
        start = self["track[%u]" % count].absolute_address
        # Search "End of track" of last track
        end = self.stream.searchBytes("\xff\x2f\x00", start, MAX_FILESIZE*8)
        if end is not None:
            return end + 3*8
        return None
+
diff --git a/lib/hachoir_parser/audio/mod.py b/lib/hachoir_parser/audio/mod.py
new file mode 100644
index 0000000000000000000000000000000000000000..75025e0cdf9fd4fa11f0808a595e2a1aed2201fb
--- /dev/null
+++ b/lib/hachoir_parser/audio/mod.py
@@ -0,0 +1,149 @@
+"""
+Parser of FastTrackerII Extended Module (XM) version 1.4
+
+Documents:
+- Modplug source code (file modplug/soundlib/Load_mod.cpp)
+  http://sourceforge.net/projects/modplug
- Dumb source code (files include/dumb.h and src/it/readmod.c)
  http://dumb.sf.net/
+- Documents on "MOD" format on Wotsit
+  http://www.wotsit.org
+
Compressed formats (i.e. starting with "PP20" or having "PACK" as type)
are not handled. Also NoiseTracker's NST modules aren't handled, although
it might be possible: no file format and 15 samples.
+
+Author: Christophe GISQUET <christophe.gisquet@free.fr>
+Creation: 18th February 2007
+"""
+
+from math import log10
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    Bits, UInt16, UInt8,
+    RawBytes, String, GenericVector)
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_core.text_handler import textHandler
+
# Old NoiseTracker 15-samples modules can have anything here.
# Maps the 4-byte type tag at offset 1080 to (tracker name, channel count).
MODULE_TYPE = {
    "M.K.": ("Noise/Pro-Tracker", 4),
    "M!K!": ("Noise/Pro-Tracker", 4),
    "M&K&": ("Noise/Pro-Tracker", 4),
    "RASP": ("StarTrekker", 4),
    "FLT4": ("StarTrekker", 4),
    "FLT8": ("StarTrekker", 8),
    "6CHN": ("FastTracker", 6),
    "8CHN": ("FastTracker", 8),
    "CD81": ("Octalyser", 8),
    "OCTA": ("Octalyser", 8),
    "FA04": ("Digital Tracker", 4),
    "FA06": ("Digital Tracker", 6),
    "FA08": ("Digital Tracker", 8),
}
+
def getFineTune(val):
    """Display a sample fine-tune nibble as a signed value.

    Only the low 4 bits of the stored byte are significant; mask them so a
    corrupt file with high bits set cannot raise IndexError (the original
    indexed the table with the raw byte value).
    """
    return ("0", "1", "2", "3", "4", "5", "6", "7", "8",
            "-8", "-7", "-6", "-5", "-4", "-3", "-2", "-1")[val.value & 0x0F]
+
def getVolume(val):
    """Display a linear sample volume (0..64) in decibels; 64 maps to 0.0 dB.

    Bug fix: a volume of 0 (a legal, silent sample) made log10() raise a
    math domain error; report it as "-inf dB" instead.
    """
    if not val.value:
        return "-inf dB"
    return "%.1f dB" % (20.0*log10(val.value/64.0))
+
class SampleInfo(FieldSet):
    """30-byte sample descriptor stored in the MOD header."""
    static_size = 30*8
    def createFields(self):
        yield String(self, "name", 22, strip='\0')
        # Counted in 16-bit words: the sample data reader uses 2*sample_count bytes
        yield UInt16(self, "sample_count")
        yield textHandler(UInt8(self, "fine_tune"), getFineTune)
        yield textHandler(UInt8(self, "volume"), getVolume)
        yield UInt16(self, "loop_start", "Loop start offset in samples")
        yield UInt16(self, "loop_len", "Loop length in samples")

    def createValue(self):
        return self["name"].value
+
class Header(FieldSet):
    """Fixed 1084-byte MOD header: song name, 31 sample infos,
    pattern position table and 4-byte type tag."""
    static_size = 1084*8

    def createFields(self):
        yield String(self, "name", 20, strip='\0')
        yield GenericVector(self, "samples", 31, SampleInfo, "info")
        yield UInt8(self, "length")
        yield UInt8(self, "played_patterns_count")
        yield GenericVector(self, "patterns", 128, UInt8, "position")
        yield String(self, "type", 4)

    def getNumChannels(self):
        # Channel count derives from the type tag; AmigaModule.validate()
        # already ensured the tag is a known MODULE_TYPE key.
        return MODULE_TYPE[self["type"].value][1]
+
class Note(FieldSet):
    """One 4-byte note event (field widths 4+12+4+4+8 = 32 bits)."""
    static_size = 8*4
    def createFields(self):
        # presumably the two nibbles form the instrument/sample number split
        # around the period, per the classic MOD layout -- TODO confirm
        yield Bits(self, 4, "note_hi_nibble")
        yield Bits(self, 12, "period")
        yield Bits(self, 4, "note_low_nibble")
        yield Bits(self, 4, "effect")
        yield UInt8(self, "parameter")
+
class Row(FieldSet):
    """One pattern row: one 4-byte Note per channel."""
    def __init__(self, parent, name, channels, desc=None):
        FieldSet.__init__(self, parent, name, description=desc)
        # channels: number of channels in the module (from the header type tag)
        self.channels = channels
        # 4 bytes per note, expressed in bits
        self._size = 8*self.channels*4

    def createFields(self):
        for index in xrange(self.channels):
            yield Note(self, "note[]")
+
class Pattern(FieldSet):
    """One pattern: a fixed 64 rows of notes."""
    def __init__(self, parent, name, channels, desc=None):
        FieldSet.__init__(self, parent, name, description=desc)
        # channels: number of channels in the module
        self.channels = channels
        # 64 rows of (4 bytes per channel), expressed in bits
        self._size = 64*8*self.channels*4

    def createFields(self):
        for index in xrange(64):
            yield Row(self, "row[]", self.channels)
+
class AmigaModule(Parser):
    """Parser for classic uncompressed Amiga modules (ProTracker and relatives)."""
    PARSER_TAGS = {
        "id": "mod",
        "category": "audio",
        "file_ext": ("mod", "nst", "wow", "oct", "sd0" ),
        # deduplicated: the original listed audio/mod and audio/x-mod twice
        "mime": (u'audio/mod', u'audio/x-mod'),
        "min_size": 1084*8,
        "description": "Uncompressed amiga module"
    }
    endian = BIG_ENDIAN

    def validate(self):
        """Check the 4-byte type tag at offset 1080 against known trackers."""
        t = self.stream.readBytes(1080*8, 4)
        if t not in MODULE_TYPE:
            return "Invalid module type '%s'" % t
        # Bug fix: the original used "lambda t: ...", whose parameter shadowed
        # the tag and made createValue() fail with a missing argument when
        # invoked with no args; capture t by closure instead.
        self.createValue = lambda: "%s module, %u channels" % MODULE_TYPE[t]
        return True

    def createFields(self):
        header = Header(self, "header")
        yield header
        channels = header.getNumChannels()

        # Number of patterns: highest entry of the 128-byte position table, plus one
        patterns = 0
        for index in xrange(128):
            patterns = max(patterns,
                           header["patterns/position[%u]" % index].value)
        patterns += 1

        # Yield patterns
        for index in xrange(patterns):
            yield Pattern(self, "pattern[]", channels)

        # Yield samples; sample_count is in 16-bit words, hence 2*count bytes
        for index in xrange(31):
            count = header["samples/info[%u]/sample_count" % index].value
            if count:
                self.info("Yielding sample %u: %u samples" % (index, count))
                yield RawBytes(self, "sample_data[]", 2*count,
                               "Sample %u" % index)
+
diff --git a/lib/hachoir_parser/audio/modplug.py b/lib/hachoir_parser/audio/modplug.py
new file mode 100644
index 0000000000000000000000000000000000000000..d0ea0ff47397ccc67020b7ec6eb0de80103210d7
--- /dev/null
+++ b/lib/hachoir_parser/audio/modplug.py
@@ -0,0 +1,291 @@
+"""
+Modplug metadata inserted into module files.
+
+Doc:
+- http://modplug.svn.sourceforge.net/viewvc/modplug/trunk/modplug/soundlib/
+
+Author: Christophe GISQUET <christophe.gisquet@free.fr>
+Creation: 10th February 2007
+"""
+
+from hachoir_core.field import (FieldSet,
+    UInt32, UInt16, UInt8, Int8, Float32,
+    RawBytes, String, GenericVector, ParserError)
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal
+
+MAX_ENVPOINTS = 32
+
def parseComments(parser):
    """Yield the song comment as one string when the block is non-empty."""
    length = parser["block_size"].value
    if length > 0:
        yield String(parser, "comment", length)
+
class MidiOut(FieldSet):
    """Nine fixed 32-character MIDI macro strings, one per event kind."""
    static_size = 9*32*8
    def createFields(self):
        for name in ("start", "stop", "tick", "noteon", "noteoff",
                     "volume", "pan", "banksel", "program"):
            yield String(self, name, 32, strip='\0')
+
class Command(FieldSet):
    """One fixed 32-byte command slot: NUL-terminated text plus raw padding."""
    static_size = 32*8
    def createFields(self):
        start = self.absolute_address
        # Length of the command text up to the first NUL byte.
        # NOTE(review): the search is not explicitly bounded to this 32-byte
        # slot; a size >= 32 would make the RawBytes length negative --
        # confirm searchBytesLength is clamped by the stream.
        size = self.stream.searchBytesLength("\0", False, start)
        if size > 0:
            self.info("Command: %s" % self.stream.readBytes(start, size))
            yield String(self, "command", size, strip='\0')
        yield RawBytes(self, "parameter", (self._size//8)-size)
+
class MidiSFXExt(FieldSet):
    """16 SFX command slots of 32 bytes each."""
    static_size = 16*32*8
    def createFields(self):
        for index in xrange(16):
            yield Command(self, "command[]")
+
class MidiZXXExt(FieldSet):
    """128 ZXX macro command slots of 32 bytes each."""
    static_size = 128*32*8
    def createFields(self):
        for index in xrange(128):
            yield Command(self, "command[]")
+
def parseMidiConfig(parser):
    """Yield the three fixed-size tables of a MIDI configuration block."""
    sections = ((MidiOut, "midi_out"),
                (MidiSFXExt, "sfx_ext"),
                (MidiZXXExt, "zxx_ext"))
    for field_class, field_name in sections:
        yield field_class(parser, field_name)
+
def parseChannelSettings(parser):
    """Yield per-channel mix-plugin settings (one 32-bit value each)."""
    count = parser["block_size"].value // 4
    if count:
        yield GenericVector(parser, "settings", count, UInt32, "mix_plugin")
+
def parseEQBands(parser):
    """Yield equalizer band gains (one 32-bit value per band)."""
    count = parser["block_size"].value // 4
    if count:
        yield GenericVector(parser, "gains", count, UInt32, "band")
+
class SoundMixPluginInfo(FieldSet):
    """Fixed 128-byte plugin descriptor: ids, routing and names."""
    static_size = 128*8
    def createFields(self):
        yield textHandler(UInt32(self, "plugin_id1"), hexadecimal)
        yield textHandler(UInt32(self, "plugin_id2"), hexadecimal)
        yield UInt32(self, "input_routing")
        yield UInt32(self, "output_routing")
        yield GenericVector(self, "routing_info", 4, UInt32, "reserved")
        yield String(self, "name", 32, strip='\0')
        yield String(self, "dll_name", 64, desc="Original DLL name", strip='\0')
+
class ExtraData(FieldSet):
    """Length-prefixed opaque data chunk (VST set-chunk)."""
    def __init__(self, parent, name, desc=None):
        FieldSet.__init__(self, parent, name, desc)
        # Reading self["size"] here lazily parses the first field so the
        # total field size (4-byte prefix + payload) can be fixed up front.
        self._size = (4+self["size"].value)*8

    def createFields(self):
        yield UInt32(self, "size")
        size = self["size"].value
        if size:
            yield RawBytes(self, "data", size)
+
class XPlugData(FieldSet):
    """Length-prefixed list of tagged plugin values ('DWRT', 'PORG')."""
    def __init__(self, parent, name, desc=None):
        FieldSet.__init__(self, parent, name, desc)
        # Reading self["size"] lazily parses the first field (see ExtraData).
        self._size = (4+self["size"].value)*8

    def createFields(self):
        yield UInt32(self, "size")
        while not self.eof:
            # NOTE(review): a second iteration would reuse the field name
            # "marker" (no "[]" auto-index suffix), and comparing the integer
            # value of a UInt32 with the strings 'DWRT'/'PORG' can never be
            # true -- this tag dispatch looks broken; confirm against the
            # Modplug sound library before touching it.
            yield UInt32(self, "marker")
            if self["marker"].value == 'DWRT':
                yield Float32(self, "dry_ratio")
            elif self["marker"].value == 'PORG':
                yield UInt32(self, "default_program")
+
def parsePlugin(parser):
    """Yield the plugin info block, then optional extra/XPlug data chunks."""
    yield SoundMixPluginInfo(parser, "info")

    # Check if VST setchunk present
    # NOTE(review): this compares a 32-bit byte count against bit-based
    # sizes (current_size/_size are bit counts) -- the bound looks
    # suspicious; confirm intended units.
    size = parser.stream.readBits(parser.absolute_address+parser.current_size, 32, LITTLE_ENDIAN)
    if 0 < size < parser.current_size + parser._size:
        yield ExtraData(parser, "extra_data")

    # Check if XPlugData is present
    size = parser.stream.readBits(parser.absolute_address+parser.current_size, 32, LITTLE_ENDIAN)
    if 0 < size < parser.current_size + parser._size:
        yield XPlugData(parser, "xplug_data")
+
# Format: "XXXX": (type, count, name)
# Tag dictionaries for the "XTPM" (instrument) and "STPM" (song) extension
# blocks. Codes are stored reversed on disk. A type of None means the payload
# is kept as raw bytes. Description typos of the original table were fixed
# ("Substain" -> "Sustain", "Enveloppe" -> "Envelope", "Padding" -> "Panning").
EXTENSIONS = {
    # WriteInstrumentHeaderStruct@Sndfile.cpp
    "XTPM": {
         "..Fd": (UInt32, 1, "Flags"),
         "..OF": (UInt32, 1, "Fade out"),
         "..VG": (UInt32, 1, "Global Volume"),
         "...P": (UInt32, 1, "Panning"),
         "..EV": (UInt32, 1, "Volume Envelope"),
         "..EP": (UInt32, 1, "Panning Envelope"),
         ".EiP": (UInt32, 1, "Pitch Envelope"),
         ".SLV": (UInt8, 1, "Volume Loop Start"),
         ".ELV": (UInt8, 1, "Volume Loop End"),
         ".BSV": (UInt8, 1, "Volume Sustain Begin"),
         ".ESV": (UInt8, 1, "Volume Sustain End"),
         ".SLP": (UInt8, 1, "Panning Loop Start"),
         ".ELP": (UInt8, 1, "Panning Loop End"),
         ".BSP": (UInt8, 1, "Panning Sustain Begin"),
         ".ESP": (UInt8, 1, "Panning Sustain End"),
         "SLiP": (UInt8, 1, "Pitch Loop Start"),
         "ELiP": (UInt8, 1, "Pitch Loop End"),
         "BSiP": (UInt8, 1, "Pitch Sustain Begin"),
         "ESiP": (UInt8, 1, "Pitch Sustain End"),
         ".ANN": (UInt8, 1, "NNA"),
         ".TCD": (UInt8, 1, "DCT"),
         ".AND": (UInt8, 1, "DNA"),
         "..SP": (UInt8, 1, "Panning Swing"),
         "..SV": (UInt8, 1, "Volume Swing"),
         ".CFI": (UInt8, 1, "IFC"),
         ".RFI": (UInt8, 1, "IFR"),
         "..BM": (UInt32, 1, "Midi Bank"),
         "..PM": (UInt8, 1, "Midi Program"),
         "..CM": (UInt8, 1, "Midi Channel"),
         ".KDM": (UInt8, 1, "Midi Drum Key"),
         ".SPP": (Int8, 1, "PPS"),
         ".CPP": (UInt8, 1, "PPC"),
         ".[PV": (UInt32, MAX_ENVPOINTS, "Volume Points"),
         ".[PP": (UInt32, MAX_ENVPOINTS, "Panning Points"),
         "[PiP": (UInt32, MAX_ENVPOINTS, "Pitch Points"),
         ".[EV": (UInt8, MAX_ENVPOINTS, "Volume Envelope"),
         ".[EP": (UInt8, MAX_ENVPOINTS, "Panning Envelope"),
         "[EiP": (UInt8, MAX_ENVPOINTS, "Pitch Envelope"),
         ".[MN": (UInt8, 128, "Note Mapping"),
         "..[K": (UInt32, 128, "Keyboard"),
         "..[n": (String, 32, "Name"),
         ".[nf": (String, 12, "Filename"),
         ".PiM": (UInt8, 1, "MixPlug"),
         "..RV": (UInt16, 1, "Volume Ramping"),
         "...R": (UInt16, 1, "Resampling"),
         "..SC": (UInt8, 1, "Cut Swing"),
         "..SR": (UInt8, 1, "Res Swing"),
         "..MF": (UInt8, 1, "Filter Mode"),
    },

    # See after "CODE tag dictionary", same place, elements with [EXT]
    "STPM": {
         "...C": (UInt32, 1, "Channels"),
         ".VWC": (None, 0, "CreatedWith version"),
         ".VGD": (None, 0, "Default global volume"),
         "..TD": (None, 0, "Default tempo"),
         "HIBE": (None, 0, "Embedded instrument header"),
         "VWSL": (None, 0, "LastSavedWith version"),
         ".MMP": (None, 0, "Plugin Mix mode"),
         ".BPR": (None, 0, "Rows per beat"),
         ".MPR": (None, 0, "Rows per measure"),
         "@PES": (None, 0, "Chunk separator"),
         ".APS": (None, 0, "Song Pre-amplification"),
         "..MT": (None, 0, "Tempo mode"),
         "VTSV": (None, 0, "VSTi volume"),
    }
}
+
class MPField(FieldSet):
    """One extension element: 4-byte code, 16-bit payload size, typed payload."""
    def __init__(self, parent, name, ext, desc=None):
        FieldSet.__init__(self, parent, name, desc)
        # ext: tag dictionary mapping a 4-byte code to (type, count, description)
        self.ext = ext
        # Calling createDescription() lazily parses "code" and "data_size",
        # which lets the total size (4 + 2 + payload bytes) be fixed here.
        self.info(self.createDescription())
        self._size = (6+self["data_size"].value)*8

    def createFields(self):
        # Identify tag
        code = self.stream.readBytes(self.absolute_address, 4)
        if code in self.ext:
            cls, count, comment = self.ext[code]
        else:
            cls, count, comment = RawBytes, 1, "Unknown tag"

        # Header
        yield String(self, "code", 4, comment)
        yield UInt16(self, "data_size")

        # Data
        if not cls:
            # Typeless tag (cls is None): keep the payload as raw bytes
            size = self["data_size"].value
            if size > 0:
                yield RawBytes(self, "data", size)
        elif cls in (String, RawBytes):
            # For these two types, count is a byte length, not an element count
            yield cls(self, "value", count)
        else:
            if count > 1:
                yield GenericVector(self, "values", count, cls, "item")
            else:
                yield cls(self, "value")

    def createDescription(self):
        # NOTE(review): reads the private _description attribute -- confirm
        # there is no public accessor for a field's description.
        return "Element '%s', size %i" % \
               (self["code"]._description, self["data_size"].value)
+
def parseFields(parser):
    """Parse tagged extension fields until an unknown code is found.

    Bug fix: the original indexed EXTENSIONS directly, so an unknown block
    type raised a bare KeyError before its ``ext == None`` guard could ever
    run; use dict.get() so the intended ParserError is raised instead.
    """
    # Determine field names
    ext = EXTENSIONS.get(parser["block_type"].value)
    if ext is None:
        raise ParserError("Unknown parent '%s'" % parser["block_type"].value)

    # Parse fields as long as the next 4-byte code belongs to this extension
    addr = parser.absolute_address + parser.current_size
    while not parser.eof and parser.stream.readBytes(addr, 4) in ext:
        field = MPField(parser, "field[]", ext)
        yield field
        addr += field._size

    # Abort on unknown codes
    parser.info("End of extension '%s' when finding '%s'" %
           (parser["block_type"].value, parser.stream.readBytes(addr, 4)))
+
class ModplugBlock(FieldSet):
    """Generic Modplug block: 4-byte type tag, optional 32-bit size, payload."""
    # type tag -> (field name, has explicit size?, description, payload parser)
    BLOCK_INFO = {
        "TEXT": ("comment", True, "Comment", parseComments),
        "MIDI": ("midi_config", True, "Midi configuration", parseMidiConfig),
        "XFHC": ("channel_settings", True, "Channel settings", parseChannelSettings),
        "XTPM": ("instrument_ext", False, "Instrument extensions", parseFields),
        "STPM": ("song_ext", False, "Song extensions", parseFields),
    }
    def __init__(self, parent, name, desc=None):
        FieldSet.__init__(self, parent, name, desc)
        # Default payload parser, overridden below for known block types
        self.parseBlock = parsePlugin

        # Relies on lazy parsing: reading block_type only runs createFields()
        # up to its first yield, before has_size is ever consulted.
        t = self["block_type"].value
        self.has_size = False
        if t in self.BLOCK_INFO:
            self._name, self.has_size, desc, parseBlock = self.BLOCK_INFO[t]
            # NOTE(review): the desc values in BLOCK_INFO are plain strings,
            # so callable(desc) is never true with the current table.
            if callable(desc):
                self.createDescription = lambda: desc(self)
            if parseBlock:
                self.parseBlock = lambda: parseBlock(self)

        if self.has_size:
            # Total size: 4-byte tag + 4-byte size + payload, in bits
            self._size = 8*(self["block_size"].value + 8)

    def createFields(self):
        yield String(self, "block_type", 4)
        if self.has_size:
            yield UInt32(self, "block_size")

        if self.parseBlock:
            for field in self.parseBlock():
                yield field

        if self.has_size:
            # Skip whatever the payload parser did not consume
            size = self["block_size"].value - (self.current_size//8)
            if size > 0:
                yield RawBytes(self, "data", size, "Unknown data")
+
def ParseModplugMetadata(parser):
    """Yield Modplug metadata blocks; "STPM" (song extensions) ends the list."""
    while not parser.eof:
        block = ModplugBlock(parser, "block[]")
        yield block
        if block["block_type"].value == "STPM":
            break

    # More undocumented stuff: date ?
    # NOTE(review): subtracting absolute_address from _size mixes an absolute
    # position with a relative size -- confirm the intended remaining-bytes
    # computation.
    size = (parser._size - parser.absolute_address - parser.current_size)//8
    if size > 0:
        yield RawBytes(parser, "info", size)
+
diff --git a/lib/hachoir_parser/audio/mpeg_audio.py b/lib/hachoir_parser/audio/mpeg_audio.py
new file mode 100644
index 0000000000000000000000000000000000000000..04e7d327af616951816237e7c2d952dc1c355be5
--- /dev/null
+++ b/lib/hachoir_parser/audio/mpeg_audio.py
@@ -0,0 +1,408 @@
+"""
+MPEG audio file parser.
+
Creation: 12 December 2005
+Author: Victor Stinner
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    MissingField, ParserError, createOrphanField,
+    Bit, Bits, Enum,
+    PaddingBits, PaddingBytes,
+    RawBytes)
+from hachoir_parser.audio.id3 import ID3v1, ID3v2
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_core.tools import humanFrequency, humanBitSize
+from hachoir_core.bits import long2raw
+from hachoir_core.error import HACHOIR_ERRORS
+from hachoir_core.stream import InputStreamError
+
# Max MP3 filesize: 200 MB (value expressed in bits)
MAX_FILESIZE = 200*1024*1024*8
+
class Frame(FieldSet):
    """One MPEG audio frame: 32-bit header plus raw audio data.

    Fix: the original mixed true division ``/`` and floor division ``//``
    for byte/size computations; all of them are now ``//`` (identical under
    Python 2, and correct should the code ever run under Python 3).
    """
    VERSION_NAME = { 0: "2.5", 2: "2", 3: "1" }
    MPEG_I = 3
    MPEG_II = 2
    MPEG_II_5 = 0

    LAYER_NAME = { 1: "III", 2: "II", 3: "I" }
    LAYER_I = 3
    LAYER_II = 2
    LAYER_III = 1

    # Bit rates (bit_rate * 1000 = bits/sec)
    # key 15 is always invalid
    BIT_RATES = {
        1: ( # MPEG1
            ( 0, 32,  64,  96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448 ), # layer I
            ( 0, 32,  48,  56,  64,  80,  96, 112, 128, 160, 192, 224, 256, 320, 384 ), # layer II
            ( 0, 32,  40,  48,  56,  64,  80,  96, 112, 128, 160, 192, 224, 256, 320 ), # layer III
            # -   1    2    3    4    5    6    7    8    9   10   11   12   13   14 -
        ),
        2: ( # MPEG2 / MPEG2.5
            ( 0, 32,  48,  56,  64,  80,  96, 112, 128, 144, 160, 176, 192, 224, 256 ), # layer I
            ( 0,  8,  16,  24,  32,  40,  48,  56,  64,  80,  96, 112, 128, 144, 160 ), # layer II
            ( 0,  8,  16,  24,  32,  40,  48,  56,  64,  80,  96, 112, 128, 144, 160 ), # layer III
            # -   1    2    3    4    5    6    7    8    9   10   11   12   13   14 -
        )
    }
    SAMPLING_RATES = {
        3: {0: 44100, 1: 48000, 2: 32000},  # MPEG1
        2: {0: 22050, 1: 24000, 2: 16000},  # MPEG2
        0: {0: 11025, 1: 12000, 2: 8000}    # MPEG2.5
    }
    EMPHASIS_NAME = {0: "none", 1: "50/15 ms",  3: "CCIT J.17"}
    CHANNEL_MODE_NAME = {
        0: "Stereo",
        1: "Joint stereo",
        2: "Dual channel",
        3: "Single channel"
    }
    # Channel mode => number of channels
    NB_CHANNEL = {
        0: 2,
        1: 2,
        2: 2,
        3: 1,
    }

    def __init__(self, *args, **kw):
        FieldSet.__init__(self, *args, **kw)
        if not self._size:
            # Size not imposed by the caller: compute it from the header
            frame_size = self.getFrameSize()
            if not frame_size:
                raise ParserError("MPEG audio: Invalid frame %s" % self.path)
            self._size = min(frame_size * 8, self.parent.size - self.address)

    def createFields(self):
        # Header
        yield PaddingBits(self, "sync", 11, "Synchronize bits (set to 1)", pattern=1)
        yield Enum(Bits(self, "version", 2, "MPEG audio version"), self.VERSION_NAME)
        yield Enum(Bits(self, "layer", 2, "MPEG audio layer"), self.LAYER_NAME)
        yield Bit(self, "crc16", "No CRC16 protection?")

        # Rates and padding
        yield Bits(self, "bit_rate", 4, "Bit rate")
        yield Bits(self, "sampling_rate", 2, "Sampling rate")
        yield Bit(self, "use_padding", "Stream field use padding?")
        yield Bit(self, "extension", "Extension")

        # Channel mode, mode extension, copyright, ...
        yield Enum(Bits(self, "channel_mode", 2, "Channel mode"), self.CHANNEL_MODE_NAME)
        yield Bits(self, "mode_ext", 2, "Mode extension")
        yield Bit(self, "copyright", "Is copyrighted?")
        yield Bit(self, "original", "Is original?")
        yield Enum(Bits(self, "emphasis", 2, "Emphasis"), self.EMPHASIS_NAME)

        size = (self.size - self.current_size) // 8
        if size:
            yield RawBytes(self, "data", size)

    def isValid(self):
        """Fast sanity check on the 32-bit header values."""
        return (self["layer"].value != 0
            and self["sync"].value == 2047
            and self["version"].value != 1
            and self["sampling_rate"].value != 3
            and self["bit_rate"].value not in (0, 15)
            and self["emphasis"].value != 2)

    def getSampleRate(self):
        """
        Read sampling rate. Returns None on error.
        """
        version = self["version"].value
        rate = self["sampling_rate"].value
        try:
            return self.SAMPLING_RATES[version][rate]
        except (KeyError, IndexError):
            return None

    def getBitRate(self):
        """
        Read bit rate in bit/sec. Returns None on error.
        """
        layer = 3 - self["layer"].value
        bit_rate = self["bit_rate"].value
        if bit_rate in (0, 15):
            return None
        if self["version"].value == 3:
            dataset = self.BIT_RATES[1] # MPEG1
        else:
            dataset = self.BIT_RATES[2] # MPEG2 / MPEG2.5
        try:
            return dataset[layer][bit_rate] * 1000
        except (KeyError, IndexError):
            return None

    def getFrameSize(self):
        """
        Read frame size in bytes. Returns None on error.
        """
        frame_size = self.getBitRate()
        if not frame_size:
            return None
        sample_rate = self.getSampleRate()
        if not sample_rate:
            return None
        padding = int(self["use_padding"].value)

        if self["layer"].value == self.LAYER_III:
            if self["version"].value == self.MPEG_I:
                return (frame_size * 144) // sample_rate + padding
            else:
                return (frame_size * 72)  // sample_rate + padding
        elif self["layer"].value == self.LAYER_II:
            return (frame_size * 144) // sample_rate + padding
        else: # self.LAYER_I:
            frame_size = (frame_size * 12) // sample_rate
            return (frame_size + padding) * 4

    def getNbChannel(self):
        """Number of audio channels implied by the channel mode."""
        return self.NB_CHANNEL[ self["channel_mode"].value ]

    def createDescription(self):
        info = ["layer %s" % self["layer"].display]
        bit_rate = self.getBitRate()
        if bit_rate:
            info.append("%s/sec" % humanBitSize(bit_rate))
        sampling_rate = self.getSampleRate()
        if sampling_rate:
            info.append(humanFrequency(sampling_rate))
        return "MPEG-%s %s" % (self["version"].display, ", ".join(info))
+
def findSynchronizeBits(parser, start, max_size):
    """
    Find synchronisation bits (11 bits set to 1)

    parser: parser whose stream is scanned
    start: absolute bit address where the scan begins
    max_size: maximum number of bits to scan (the scan stops at start+max_size)

    Returns None on error, or number of bytes before the synchronization.
    """
    address0 = parser.absolute_address
    end = start + max_size
    size = 0
    while start < end:
        # Fast search: search 0xFF (first byte of sync frame field)
        length = parser.stream.searchBytesLength("\xff", False, start, end)
        if length is None:
            return None
        size += length
        start += length * 8

        # Strong validation of frame: create the frame
        # and call method isValid()
        try:
            frame = createOrphanField(parser, start-address0, Frame, "frame")
            valid = frame.isValid()
        except HACHOIR_ERRORS:
            valid = False
        if valid:
            return size

        # Invalid frame: continue one byte further
        start += 8
        size += 1
    return None
+
class Frames(FieldSet):
    """Sequence of MPEG audio frames, with optional leading padding."""
    # Padding bytes allowed before a frame
    MAX_PADDING = 256

    def synchronize(self):
        """Skip garbage before the first frame.

        Returns a PaddingBytes field to yield, or None when the stream is
        already synchronized.

        Bug fix: findSynchronizeBits() expects a *size* (bit count) as its
        third argument, but the original passed the absolute end address,
        which made the scan window roughly twice as large as intended.
        """
        addr = self.absolute_address
        start = addr + self.current_size
        end = min(start + self.MAX_PADDING*8, addr + self.size)
        padding = findSynchronizeBits(self, start, end - start)
        if padding is None:
            raise ParserError("MPEG audio: Unable to find synchronization bits")
        if padding:
            return PaddingBytes(self, "padding[]", padding, "Padding before synchronization")
        else:
            return None

    def looksConstantBitRate(self, count=10):
        """
        Guess if frames are constant bit rate. If it returns False, you can
        be sure that frames are variable bit rate. Otherwise, it looks like
        constant bit rate (on first count fields).
        """
        check_keys = ("version", "layer", "bit_rate")
        last_field = None
        for index, field in enumerate(self.array("frame")):
            if last_field:
                for key in check_keys:
                    if field[key].value != last_field[key].value:
                        return False
            last_field = field
            if index == count:
                break
        return True

    def createFields(self):
        # Find synchronisation bytes
        padding = self.synchronize()
        if padding:
            yield padding

        while self.current_size < self.size:
            yield Frame(self, "frame[]")

        # Read raw bytes at the end (if any)
        size = (self.size - self.current_size) // 8
        if size:
            yield RawBytes(self, "raw", size)

    def createDescription(self):
        if self.looksConstantBitRate():
            text = "(looks like) Constant bit rate (CBR)"
        else:
            text = "Variable bit rate (VBR)"
        return "Frames: %s" % text
+
def createMpegAudioMagic():
    """Build the list of (magic, offset) pairs identifying MPEG audio files.

    Covers the ID3v1 tag, every valid ID3v2 major version, and every 16-bit
    MPEG frame header prefix (sync bits + version + layer + CRC flag).

    Fixes: replaced the Python2-only dict.iterkeys() with plain dict
    iteration and repaired the 3-space indentation of the ID3v2 loop.
    """
    # ID3v1 magic
    magics = [("TAG", 0)]

    # ID3v2 magics
    for ver_major in ID3v2.VALID_MAJOR_VERSIONS:
        magic = "ID3%c\x00" % ver_major
        magics.append((magic, 0))

    # MPEG frame magic
    # TODO: Use longer magic: 32 bits instead of 16 bits
    SYNC_BITS = 2047
    for version in Frame.VERSION_NAME:
        for layer in Frame.LAYER_NAME:
            for crc16 in (0, 1):
                magic = (SYNC_BITS << 5) | (version << 3) | (layer << 1) | crc16
                magic = long2raw(magic, BIG_ENDIAN, 2)
                magics.append((magic, 0))
    return magics
+
class MpegAudioFile(Parser):
    """MPEG audio (MP1/MP2/MP3) file: optional ID3v2, frames, optional ID3v1."""
    PARSER_TAGS = {
        "id": "mpeg_audio",
        "category": "audio",
        "file_ext": ("mpa", "mp1", "mp2", "mp3"),
        "mime": (u"audio/mpeg",),
        "min_size": 4*8,
#        "magic": createMpegAudioMagic(),
        "description": "MPEG audio version 1, 2, 2.5",
        "subfile": "skip",
    }
    endian = BIG_ENDIAN

    def validate(self):
        """Accept files starting with an ID3 tag, or whose first frames parse."""
        if self[0].name in ("id3v2", "id3v1"):
            return True

        if not self.stream.checked: # TODO: is it possible to handle piped input?
            return False

        # Validate first 5 frames
        for index in xrange(5):
            try:
                frame = self["frames/frame[%u]" % index]
            except MissingField:
                # Require a least one valid frame
                if (1 <= index) \
                and self["frames"].done:
                    return True
                return "Unable to get frame #%u" % index
            except (InputStreamError, ParserError):
                return "Unable to create frame #%u" % index

            # Check first frame values
            if not frame.isValid():
                return "Frame #%u is invalid" % index

            # Check that all frames are similar
            if not index:
                frame0 = frame
            else:
                if frame0["channel_mode"].value != frame["channel_mode"].value:
                    return "Frame #%u channel mode is different" % index
        return True

    def createFields(self):
        # Read ID3v2 (if any)
        if self.stream.readBytes(0, 3) == "ID3":
            yield ID3v2(self, "id3v2")

        if self._size is None: # TODO: is it possible to handle piped input?
            raise NotImplementedError

        # Check if file is ending with ID3v1 or not and compute frames size
        frames_size = self.size - self.current_size
        addr = self.size - 128*8
        if 0 <= addr:
            has_id3 = (self.stream.readBytes(addr, 3) == "TAG")
            if has_id3:
                frames_size -= 128*8
        else:
            has_id3 = False

        # Read frames (if any)
        if frames_size:
            yield Frames(self, "frames", size=frames_size)

        # Read ID3v1 (if any)
        if has_id3:
            yield ID3v1(self, "id3v1")

    def createDescription(self):
        """Describe using the first frame, falling back to the ID3 tags."""
        if "frames" in self:
            frame = self["frames/frame[0]"]
            return "%s, %s" % (frame.description, frame["channel_mode"].display)
        elif "id3v2" in self:
            return self["id3v2"].description
        elif "id3v1" in self:
            return self["id3v1"].description
        else:
            return "MPEG audio"

    def createContentSize(self):
        """Compute real content size by walking frames until one is invalid."""
        # Get "frames" field
        field = self[0]
        if field.name != "frames":
            try:
                field = self[1]
            except MissingField:
                # File only contains ID3v1 or ID3v2
                return field.size

            # Error: second field are not the frames"?
            if field.name != "frames":
                return None

        # Go to last frame
        frames = field
        frame = frames["frame[0]"]
        address0 = field.absolute_address
        size = address0 + frame.size
        while True:
            try:
                # Parse one MPEG audio frame
                frame = createOrphanField(frames, size - address0, Frame, "frame")

                # Check frame 32 bits header
                if not frame.isValid():
                    break
            except HACHOIR_ERRORS:
                break
            if MAX_FILESIZE < (size + frame.size):
                break
            size += frame.size

        # ID3v1 at the end?
        try:
            if self.stream.readBytes(size, 3) == "TAG":
                size += ID3v1.static_size
        except InputStreamError:
            pass
        return size
+
diff --git a/lib/hachoir_parser/audio/real_audio.py b/lib/hachoir_parser/audio/real_audio.py
new file mode 100644
index 0000000000000000000000000000000000000000..289ed6e1ebbbb0796bd467be1467c2872c9b5b09
--- /dev/null
+++ b/lib/hachoir_parser/audio/real_audio.py
@@ -0,0 +1,90 @@
+"""
+RealAudio (.ra) parser
+
+Author: Mike Melanson
+References:
+  http://wiki.multimedia.cx/index.php?title=RealMedia
+Samples:
+  http://samples.mplayerhq.hu/real/RA/
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    UInt8, UInt16, UInt32,
+    Bytes, RawBytes, String,
+    PascalString8)
+from hachoir_core.tools import humanFrequency
+from hachoir_core.text_handler import displayHandler
+from hachoir_core.endian import BIG_ENDIAN
+
class Metadata(FieldSet):
    """RealAudio song metadata: four length-prefixed ISO-8859-1 strings."""
    def createFields(self):
        # All four entries share the same encoding; emit them in file order.
        for name in ("title", "author", "copyright", "comment"):
            yield PascalString8(self, name, charset="ISO-8859-1")
+
class RealAudioFile(Parser):
    """Parser for RealAudio (.ra) files, header versions 3 and 4."""
    MAGIC = ".ra\xFD"
    PARSER_TAGS = {
        "id": "real_audio",
        "category": "audio",
        "file_ext": ["ra"],
        "mime": (u"audio/x-realaudio", u"audio/x-pn-realaudio"),
        "min_size": 6*8,
        "magic": ((MAGIC, 0),),
        "description": u"Real audio (.ra)",
    }
    endian = BIG_ENDIAN

    def validate(self):
        # Accept only the ".ra\xFD" magic and a known header version (3 or 4).
        if self["signature"].value != self.MAGIC:
            return "Invalid signature"
        if self["version"].value not in (3, 4):
            return "Unknown version"
        return True

    def createFields(self):
        yield Bytes(self, "signature", 4, r"RealAudio identifier ('.ra\xFD')")
        yield UInt16(self, "version", "Version")
        if self["version"].value == 3:
            yield UInt16(self, "header_size", "Header size")
            yield RawBytes(self, "Unknown1", 10)
            yield UInt32(self, "data_size", "Data size")
            yield Metadata(self, "metadata")
            yield UInt8(self, "Unknown2")
            yield PascalString8(self, "FourCC")
            audio_size = self["data_size"].value
        else: # version = 4
            yield UInt16(self, "reserved1", "Reserved, should be 0")
            yield String(self, "ra4sig", 4, "'.ra4' signature")
            yield UInt32(self, "filesize", "File size (minus 40 bytes)")
            yield UInt16(self, "version2", "Version 2 (always equal to version)")
            yield UInt32(self, "headersize", "Header size (minus 16)")
            yield UInt16(self, "codec_flavor", "Codec flavor")
            yield UInt32(self, "coded_frame_size", "Coded frame size")
            yield RawBytes(self, "unknown1", 12)
            yield UInt16(self, "subpacketh", "Subpacket h (?)")
            yield UInt16(self, "frame_size", "Frame size")
            yield UInt16(self, "sub_packet_size", "Subpacket size")
            yield UInt16(self, "unknown2", "Unknown")
            yield displayHandler(UInt16(self, "sample_rate", "Sample rate"), humanFrequency)
            yield UInt16(self, "unknown3", "Unknown")
            yield UInt16(self, "sample_size", "Sample size")
            yield UInt16(self, "channels", "Channels")
            # NOTE(review): field names with spaces/capitals ("Interleaving ID
            # String", "FourCC", "Unknown1") are unusual for hachoir — confirm
            # the field framework accepts them.
            yield PascalString8(self, "Interleaving ID String")
            yield PascalString8(self, "FourCC")
            yield RawBytes(self, "unknown4", 3)
            yield Metadata(self, "metadata")
            # Stored sizes exclude fixed header parts, hence the +40 / +16.
            audio_size = (self["filesize"].value + 40) - (self["headersize"].value + 16)
        if 0 < audio_size:
            yield RawBytes(self, "audio_data", audio_size)

    def createDescription(self):
        # Human-readable summary built from the parsed header fields.
        if (self["version"].value == 3):
            return "RealAudio v3 file, '%s' codec" % self["FourCC"].value
        elif (self["version"].value == 4):
            return "RealAudio v4 file, '%s' codec, %s, %u channels" % (
                self["FourCC"].value, self["sample_rate"].display, self["channels"].value)
        else:
            return "Real audio"
diff --git a/lib/hachoir_parser/audio/s3m.py b/lib/hachoir_parser/audio/s3m.py
new file mode 100644
index 0000000000000000000000000000000000000000..1b2a73260fed885603e460dcfab8329bd0ef6cf9
--- /dev/null
+++ b/lib/hachoir_parser/audio/s3m.py
@@ -0,0 +1,668 @@
+"""
+The ScreamTracker 3.0x module format description for .s3m files.
+
+Documents:
+- Search s3m on Wotsit
+  http://www.wotsit.org/
+
+Author: Christophe GISQUET <christophe.gisquet@free.fr>
+Creation: 11th February 2007
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (StaticFieldSet, FieldSet, Field,
+    Bit, Bits,
+    UInt32, UInt16, UInt8, Enum,
+    PaddingBytes, RawBytes, NullBytes,
+    String, GenericVector, ParserError)
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal
+from hachoir_core.tools import alignValue
+
class Chunk:
    """Descriptor for a lazily-parsed file region.

    Records the field class to instantiate, the field name, the byte offset
    and byte size in the stream, plus extra constructor arguments.
    """
    def __init__(self, cls, name, offset, size, *args):
        # Todo: swap and have None=unknown instead of now: 0=unknown
        # Fix: compare against None with identity, not equality (PEP 8).
        assert size is not None and size >= 0
        self.cls = cls
        self.name = name
        self.offset = offset
        self.size = size
        self.args = args
+
class ChunkIndexer:
    """Holds pending Chunk descriptors (sorted by decreasing offset) and
    yields the corresponding fields in file order via yieldChunks()."""
    def __init__(self):
        self.chunks = [ ]

    # Check if a chunk fits
    def canHouse(self, chunk, index):
        # NOTE(review): 'index > 1' skips checking against chunks[0] when
        # inserting at index 1 — possibly meant 'index >= 1'; confirm.
        if index > 1:
            if chunk.offset + chunk.size > self.chunks[index-1].offset:
                return False
        # We could test now that it fits in the memory
        return True

    # Chunks are kept sorted by decreasing offset: nearest chunk is last
    def addChunk(self, new_chunk):
        index = 0
        # Find first chunk whose offset is smaller, insert before it
        while index < len(self.chunks):
            offset = self.chunks[index].offset
            if offset < new_chunk.offset:
                if not self.canHouse(new_chunk, index):
                    raise ParserError("Chunk '%s' doesn't fit!" % new_chunk.name)
                self.chunks.insert(index, new_chunk)
                return
            index += 1

        # Not found or empty
        # We could at least check that it fits in the memory
        self.chunks.append(new_chunk)

    def yieldChunks(self, obj):
        """Yield fields for all registered chunks, padding or resynching
        as needed; new sub-chunks reported by a field are queued too."""
        while len(self.chunks) > 0:
            chunk = self.chunks.pop()
            current_pos = obj.current_size//8

            # Check if padding needed
            size = chunk.offset - current_pos
            if size > 0:
                obj.info("Padding of %u bytes needed: curr=%u offset=%u" % \
                         (size, current_pos, chunk.offset))
                yield PaddingBytes(obj, "padding[]", size)
                current_pos = obj.current_size//8

            # Find resynch point if needed
            count = 0
            old_off = chunk.offset
            while chunk.offset < current_pos:
                count += 1
                # Bug fix: the old code popped unconditionally, so an empty
                # list raised IndexError and its "chunk == None" guard was
                # dead code. Bail out gracefully instead.
                if not self.chunks:
                    obj.info("Couldn't resynch: %u object skipped to reach %u" % \
                             (count, current_pos))
                    return
                # Unfortunaly, we also pass the underlying chunks
                chunk = self.chunks.pop()

            # Resynch
            size = chunk.offset-current_pos
            if size > 0:
                obj.info("Skipped %u objects to resynch to %u; chunk offset: %u->%u" % \
                         (count, current_pos, old_off, chunk.offset))
                yield RawBytes(obj, "resynch[]", size)

            # Yield
            obj.info("Yielding element of size %u at offset %u" % \
                     (chunk.size, chunk.offset))
            field = chunk.cls(obj, chunk.name, chunk.size, *chunk.args)
            # Not tested, probably wrong:
            #if chunk.size: field.static_size = 8*chunk.size
            yield field

            if hasattr(field, "getSubChunks"):
                for sub_chunk in field.getSubChunks():
                    obj.info("Adding sub chunk: position=%u size=%u name='%s'" % \
                             (sub_chunk.offset, sub_chunk.size, sub_chunk.name))
                    self.addChunk(sub_chunk)

            # Let missing padding be done by next chunk
+
class S3MFlags(StaticFieldSet):
    # 16-bit flags word of the S3M header (low byte significant).
    format = (
        (Bit, "st2_vibrato", "Vibrato (File version 1/ScreamTrack 2)"),
        (Bit, "st2_tempo", "Tempo (File version 1/ScreamTrack 2)"),
        (Bit, "amiga_slides", "Amiga slides (File version 1/ScreamTrack 2)"),
        (Bit, "zero_vol_opt", "Automatically turn off looping notes whose volume is zero for >2 note rows"),
        (Bit, "amiga_limits", "Disallow notes beyond Amiga hardware specs"),
        (Bit, "sb_processing", "Enable filter/SFX with SoundBlaster"),
        (Bit, "vol_slide", "Volume slide also performed on first row"),
        (Bit, "extended", "Special custom data in file"),
        (Bits, "unused[]", 8)
    )
+
def parseChannelType(val):
    """Map a raw channel-type value to a human-readable description."""
    n = val.value
    # Ranges: 0-7 left sample, 8-15 right sample, 16-31 Adlib.
    for limit, label, base in ((8, "Left Sample Channel %u", 0),
                               (16, "Right Sample Channel %u", 8),
                               (32, "Adlib channel %u", 16)):
        if n < limit:
            return label % (n - base)
    return "Value %u unknown" % n
+
class ChannelSettings(FieldSet):
    """One settings byte per channel: 7-bit type plus an 'enabled' bit."""
    static_size = 8
    def createFields(self):
        yield textHandler(Bits(self, "type", 7), parseChannelType)
        yield Bit(self, "enabled")
+
class ChannelPanning(FieldSet):
    """Default pan position byte for one channel (S3M 3.20 extension)."""
    static_size = 8
    def createFields(self):
        yield Bits(self, "default_position", 4, "Default pan position")
        yield Bit(self, "reserved[]")
        yield Bit(self, "use_default", "Bits 0:3 specify default position")
        yield Bits(self, "reserved[]", 2)
+
+# Provide an automatic constructor
+class SizeFieldSet(FieldSet):
+    """
+    Provide an automatic constructor for a sized field that can be aligned
+    on byte positions according to ALIGN.
+
+    Size is ignored if static_size is set. Real size is stored
+    for convenience, but beware, it is not in bits, but in bytes.
+
+    Field can be automatically padded, unless:
+    - size is 0 (unknown, so padding doesn't make sense)
+    - it shouldn't be aligned
+
+    If it shouldn't be aligned, two solutions:
+    - change _size to another value than the one found through aligment.
+    - derive a class with ALIGN = 0.
+    """
+    ALIGN = 16
+    def __init__(self, parent, name, size, desc=None):
+        FieldSet.__init__(self, parent, name, desc)
+        if size:
+            self.real_size = size
+            if self.static_size == None:
+                self.setCheckedSizes(size)
+
+    def setCheckedSizes(self, size):
+        # First set size so that end is aligned, if needed
+        self.real_size = size
+        size *= 8
+        if self.ALIGN:
+            size = alignValue(self.absolute_address+size, 8*self.ALIGN) \
+                   - self.absolute_address
+
+        if self._parent._size:
+            if self._parent.current_size + size > self._parent._size:
+                size = self._parent._size - self._parent.current_size
+
+        self._size = size
+
+    def createFields(self):
+        for field in self.createUnpaddedFields():
+            yield field
+        size = (self._size - self.current_size)//8
+        if size > 0:
+            yield PaddingBytes(self, "padding", size)
+
class Header(SizeFieldSet):
    """Common module header skeleton (template-method pattern): the
    format-specific parts are supplied by get*() hooks defined in
    S3MHeader and PTMHeader."""
    def createDescription(self):
        return "%s (%u patterns, %u instruments)" % \
               (self["title"].value, self["num_patterns"].value,
                self["num_instruments"].value)

    def createValue(self):
        return self["title"].value

    # Header fields may have to be padded - specify static_size
    # or modify _size in a derived class if never.
    def createUnpaddedFields(self):
        yield String(self, "title", 28, strip='\0')
        yield textHandler(UInt8(self, "marker[]"), hexadecimal)
        for field in self.getFileVersionField():
            yield field

        yield UInt16(self, "num_orders")
        yield UInt16(self, "num_instruments")
        yield UInt16(self, "num_patterns")

        for field in self.getFirstProperties():
            yield field
        yield String(self, "marker[]", 4)
        for field in self.getLastProperties():
            yield field

        yield GenericVector(self, "channel_settings", 32,
                            ChannelSettings, "channel")

        # Orders
        yield GenericVector(self, "orders", self.getNumOrders(), UInt8, "order")

        for field in self.getHeaderEndFields():
            yield field
+
class S3MHeader(Header):
    """
    ScreamTracker 3 header layout:

          0   1   2   3   4   5   6   7   8   9   A   B   C   D   E   F
        +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
  0000: | Song name, max 28 chars (end with NUL (0))                    |
        +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
  0010: |                                               |1Ah|Typ| x | x |
        +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
  0020: |OrdNum |InsNum |PatNum | Flags | Cwt/v | Ffi   |'S'|'C'|'R'|'M'|
        +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
  0030: |g.v|i.s|i.t|m.v|u.c|d.p| x | x | x | x | x | x | x | x |Special|
        +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
  0040: |Channel settings for 32 channels, 255=unused,+128=disabled     |
        +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
  0050: |                                                               |
        +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
  0060: |Orders; length=OrdNum (should be even)                         |
        +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
  xxx1: |Parapointers to instruments; length=InsNum*2                   |
        +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
  xxx2: |Parapointers to patterns; length=PatNum*2                      |
        +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
  xxx3: |Channel default pan positions                                  |
        +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
        xxx1=70h+orders
        xxx2=70h+orders+instruments*2
        xxx3=70h+orders+instruments*2+patterns*2
    """
    def __init__(self, parent, name, size, desc=None):
        Header.__init__(self, parent, name, size, desc)

        # Overwrite real_size: actual header length depends on the counts
        # parsed above (see diagram: 0x60 + orders + 2*(instr+patterns))
        size = 0x60 + self["num_orders"].value + \
               2*(self["num_instruments"].value + self["num_patterns"].value)
        # panning_info == 252 means default pan positions are present
        if self["panning_info"].value == 252:
            size += 32

        # Deduce size for SizeFieldSet
        self.setCheckedSizes(size)

    def getFileVersionField(self):
        yield UInt8(self, "type")
        yield RawBytes(self, "reserved[]", 2)

    def getFirstProperties(self):
        yield S3MFlags(self, "flags")
        yield UInt8(self, "creation_version_minor")
        yield Bits(self, "creation_version_major", 4)
        yield Bits(self, "creation_version_unknown", 4, "(=1)")
        yield UInt16(self, "format_version")

    def getLastProperties(self):
        yield UInt8(self, "glob_vol", "Global volume")
        yield UInt8(self, "init_speed", "Initial speed (command A)")
        yield UInt8(self, "init_tempo", "Initial tempo (command T)")
        yield Bits(self, "volume", 7)
        yield Bit(self, "stereo")
        yield UInt8(self, "click_removal", "Number of GUS channels to run to prevent clicks")
        yield UInt8(self, "panning_info")
        yield RawBytes(self, "reserved[]", 8)
        yield UInt16(self, "custom_data_parapointer",
                     "Parapointer to special custom data (not used by ST3.01)")

    def getNumOrders(self): return self["num_orders"].value

    def getHeaderEndFields(self):
        instr = self["num_instruments"].value
        patterns = self["num_patterns"].value
        # File pointers (parapointers: byte offset = 16*value)
        if instr > 0:
            yield GenericVector(self, "instr_pptr", instr, UInt16, "offset")
        if patterns > 0:
            yield GenericVector(self, "pattern_pptr", patterns, UInt16, "offset")

        # S3M 3.20 extension
        if self["creation_version_major"].value >= 3 \
        and self["creation_version_minor"].value >= 0x20 \
        and self["panning_info"].value == 252:
            yield GenericVector(self, "channel_panning", 32, ChannelPanning, "channel")

        # Padding required for 16B alignment
        size = self._size - self.current_size
        if size > 0:
            yield PaddingBytes(self, "padding", size//8)

    def getSubChunks(self):
        # Instruments -  no warranty that they are concatenated
        for index in xrange(self["num_instruments"].value):
            yield Chunk(S3MInstrument, "instrument[]",
                        16*self["instr_pptr/offset[%u]" % index].value,
                        S3MInstrument.static_size//8)

        # Patterns - size unknown but listed in their headers
        for index in xrange(self["num_patterns"].value):
            yield Chunk(S3MPattern, "pattern[]",
                        16*self["pattern_pptr/offset[%u]" % index].value, 0)
+
class PTMHeader(Header):
    """PolyTracker module header: fixed 608-byte layout."""
    # static_size should prime over _size, right?
    static_size = 8*608

    # NOTE(review): missing 'self' — reads like a display-handler helper
    # taking the field as 'val'; it is never called as a bound method here.
    def getTrackerVersion(val):
        val = val.value
        return "ProTracker x%04X" % val

    def getFileVersionField(self):
        yield UInt16(self, "type")
        yield RawBytes(self, "reserved[]", 1)

    def getFirstProperties(self):
        yield UInt16(self, "channels")
        yield UInt16(self, "flags") # 0 => NullBytes
        yield UInt16(self, "reserved[]")

    def getLastProperties(self):
        yield RawBytes(self, "reserved[]", 16)

    def getNumOrders(self): return 256

    def getHeaderEndFields(self):
        yield GenericVector(self, "pattern_pptr", 128, UInt16, "offset")

    def getSubChunks(self):
        # It goes like this in the BS: patterns->instruments->instr. samples

        if self._parent._size:
            min_off = self.absolute_address+self._parent._size
        else:
            # Sentinel "very large" offset when parent size is unknown
            min_off = 99999999999

        # Instruments and minimal end position for last pattern
        count = self["num_instruments"].value
        addr = self.absolute_address
        for index in xrange(count):
            offset = (self.static_size+index*PTMInstrument.static_size)//8
            yield Chunk(PTMInstrument, "instrument[]", offset,
                        PTMInstrument.static_size//8)
            # Peek the instrument's sample_offset (at +18 bytes) to find
            # where sample data can start at the earliest
            offset = self.stream.readBits(addr+8*(offset+18), 32, LITTLE_ENDIAN)
            min_off = min(min_off, offset)

        # Patterns (parapointers: byte offset = 16*value)
        count = self["num_patterns"].value
        prev_off = 16*self["pattern_pptr/offset[0]"].value
        for index in range(1, count):
            offset = 16*self["pattern_pptr/offset[%u]" % index].value
            yield Chunk(PTMPattern, "pattern[]", prev_off, offset-prev_off)
            prev_off = offset

        # Difficult to account for
        yield Chunk(PTMPattern, "pattern[]", prev_off, min_off-prev_off)
+
class SampleFlags(StaticFieldSet):
    # Sample flags byte of an S3M instrument (offset 0x1F).
    format = (
        (Bit, "loop_on"),
        (Bit, "stereo", "Sample size will be 2*length"),
        (Bit, "16bits", "16b sample, Intel LO-HI byteorder"),
        (Bits, "unused", 5)
    )
+
class S3MUInt24(Field):
    """24-bit S3M sample parapointer, decoded to a byte offset:
    value = ((high_byte << 16) | low_word) * 16."""
    static_size = 24
    def __init__(self, parent, name, desc=None):
        Field.__init__(self, parent, name, size=24, description=desc)
        addr = self.absolute_address
        # <<20 / <<4 is ((byte<<16) | word) * 16: 16-byte paragraph units
        val = parent.stream.readBits(addr, 8, LITTLE_ENDIAN) << 20
        val += parent.stream.readBits(addr+8, 16, LITTLE_ENDIAN) << 4
        # val is bound at construction time, so the lambda is safe
        self.createValue = lambda: val
+
class SampleData(SizeFieldSet):
    """Raw sample bytes; real_size is set by the owning Chunk."""
    def createUnpaddedFields(self):
        yield RawBytes(self, "data", self.real_size)
class PTMSampleData(SampleData):
    # PTM sample data is not padded/aligned
    ALIGN = 0
+
class Instrument(SizeFieldSet):
    """Common 0x50-byte instrument/sample header layout shared by the
    S3M and PTM variants; the middle section is format-specific."""
    static_size = 8*0x50

    def createDescription(self):
        info = [self["c4_speed"].display]
        if "flags/stereo" in self:
            if self["flags/stereo"].value:
                info.append("stereo")
            else:
                info.append("mono")
        info.append("%u bits" % self.getSampleBits())
        return ", ".join(info)

    # Structure knows its size and doesn't need padding anyway, so
    # overwrite base member: no need to go through it.
    def createFields(self):
        yield self.getType()
        yield String(self, "filename", 12, strip='\0')

        # Format-specific middle section (see S3MInstrument/PTMInstrument)
        for field in self.getInstrumentFields():
            yield field

        yield String(self, "name", 28, strip='\0')
        yield String(self, "marker", 4, "Either 'SCRS' or '(empty)'", strip='\0')

    def createValue(self):
        return self["name"].value
+
class S3MInstrument(Instrument):
    """
    In fact a sample. Description follows:

          0   1   2   3   4   5   6   7   8   9   A   B   C   D   E   F
        +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
  0000: |[T]| Dos filename (12345678.ABC)                   |    MemSeg |
        +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
  0010: |Length |HI:leng|LoopBeg|HI:LBeg|LoopEnd|HI:Lend|Vol| x |[P]|[F]|
        +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
  0020: |C2Spd  |HI:C2sp| x | x | x | x |Int:Gp |Int:512|Int:lastused   |
        +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
  0030: | Sample name, 28 characters max... (incl. NUL)                 |
        +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
  0040: | ...sample name...                             |'S'|'C'|'R'|'S'|
        +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
  xxxx: sampledata
    """
    MAGIC = "SCRS"
    PACKING = {0: "Unpacked", 1: "DP30ADPCM" }
    TYPE = {0: "Unknown", 1: "Sample", 2: "adlib melody", 3: "adlib drum2" }

    def getType(self):
        return Enum(UInt8(self, "type"), self.TYPE)

    def getSampleBits(self):
        # 8 bits, or 16 when the 16bits flag is set
        return 8*(1+self["flags/16bits"].value)

    def getInstrumentFields(self):
        yield S3MUInt24(self, "sample_offset")
        yield UInt32(self, "sample_size")
        yield UInt32(self, "loop_begin")
        yield UInt32(self, "loop_end")
        yield UInt8(self, "volume")
        yield UInt8(self, "reserved[]")
        yield Enum(UInt8(self, "packing"), self.PACKING)
        yield SampleFlags(self, "flags")
        yield UInt32(self, "c4_speed", "Frequency for middle C note")
        # NOTE(review): the third argument "4" becomes the *description* of
        # this UInt32, not a byte count — possibly meant RawBytes(..., 4);
        # the parsed width (4 bytes) matches the layout either way.
        yield UInt32(self, "reserved[]", 4)
        yield UInt16(self, "internal[]", "Sample address in GUS memory")
        yield UInt16(self, "internal[]", "Flags for SoundBlaster loop expansion")
        yield UInt32(self, "internal[]", "Last used position (SB)")

    def getSubChunks(self):
        # Stereo and 16-bit samples store twice the bytes each
        size = self["sample_size"].value
        if self["flags/stereo"].value: size *= 2
        if self["flags/16bits"].value: size *= 2
        yield Chunk(SampleData, "sample_data[]",
                    self["sample_offset"].value, size)
+
+
class PTMType(FieldSet):
    """Flags byte of a PTM instrument: loop/16-bit bits + 2-bit origin."""
    TYPES = {0: "No sample", 1: "Regular", 2: "OPL2/OPL2 instrument", 3: "MIDI instrument" }
    static_size = 8
    def createFields(self):
        yield Bits(self, "unused", 2)
        yield Bit(self, "is_tonable")
        yield Bit(self, "16bits")
        yield Bit(self, "loop_bidir")
        yield Bit(self, "loop")
        yield Enum(Bits(self, "origin", 2), self.TYPES)
+
+##class PTMType(StaticFieldSet):
+##    format = (
+##        (Bits, "unused", 2),
+##        (Bit, "is_tonable"),
+##        (Bit, "16bits"),
+##        (Bit, "loop_bidir"),
+##        (Bit, "loop"),
+##        (Bits, "origin", 2),
+##    )
+
class PTMInstrument(Instrument):
    """PolyTracker instrument header (same 0x50-byte frame as S3M)."""
    MAGIC = "PTMI"
    ALIGN = 0

    def getType(self):
        return PTMType(self, "flags") # Hack to have more common code

    # PTM doesn't pretend to manage 16bits
    def getSampleBits(self):
        return 8

    def getInstrumentFields(self):
        yield UInt8(self, "volume")
        yield UInt16(self, "c4_speed")
        yield UInt16(self, "sample_segment")
        yield UInt32(self, "sample_offset")
        yield UInt32(self, "sample_size")
        yield UInt32(self, "loop_begin")
        yield UInt32(self, "loop_end")
        yield UInt32(self, "gus_begin")
        yield UInt32(self, "gus_loop_start")
        yield UInt32(self, "gus_loop_end")
        yield textHandler(UInt8(self, "gus_loop_flags"), hexadecimal)
        yield UInt8(self, "reserved[]") # Should be 0

    def getSubChunks(self):
        # Samples are NOT padded, and the size is already the correct one
        size = self["sample_size"].value
        if size:
            yield Chunk(PTMSampleData, "sample_data[]", self["sample_offset"].value, size)
+
+
class S3MNoteInfo(StaticFieldSet):
    """
    Note info byte, low bits first:
0=end of row
&31=channel
&32=follows;  BYTE:note, BYTE:instrument
&64=follows;  BYTE:volume
&128=follows; BYTE:command, BYTE:info
    """
    format = (
        (Bits, "channel", 5),
        (Bit, "has_note"),
        (Bit, "has_volume"),
        (Bit, "has_effect")
    )
+
class PTMNoteInfo(StaticFieldSet):
    # Same as S3MNoteInfo, but effect and volume bits are swapped
    format = (
        (Bits, "channel", 5),
        (Bit, "has_note"),
        (Bit, "has_effect"),
        (Bit, "has_volume")
    )
+
class Note(FieldSet):
    """One note event: an info byte followed by the optional
    note/instrument, volume and effect/param bytes it announces."""
    def createFields(self):
        # Used by Row to check if end of Row
        info = self.NOTE_INFO(self, "info")
        yield info
        if info["has_note"].value:
            yield UInt8(self, "note")
            yield UInt8(self, "instrument")
        if info["has_volume"].value:
            yield UInt8(self, "volume")
        if info["has_effect"].value:
            yield UInt8(self, "effect")
            yield UInt8(self, "param")
+
class S3MNote(Note):
    # S3M bit layout for the info byte
    NOTE_INFO = S3MNoteInfo
class PTMNote(Note):
    # PTM bit layout for the info byte
    NOTE_INFO = PTMNoteInfo
+
class Row(FieldSet):
    """A pattern row: a run of notes terminated by a NUL info byte."""
    def createFields(self):
        addr = self.absolute_address
        while True:
            # Check empty note (info byte 0 means end of row)
            byte = self.stream.readBits(addr, 8, self.endian)
            if not byte:
                yield NullBytes(self, "terminator", 1)
                return

            note = self.NOTE(self, "note[]")
            yield note
            addr += note.size  # note.size is in bits, like addr
+
class S3MRow(Row):
    # Row made of S3M notes
    NOTE = S3MNote
class PTMRow(Row):
    # Row made of PTM notes
    NOTE = PTMNote
+
class Pattern(SizeFieldSet):
    """Pattern body: at most 64 rows, fewer if the field data runs out."""
    def createUnpaddedFields(self):
        for _ in range(64):
            if self.eof:
                break
            yield self.ROW(self, "row[]")
+
class S3MPattern(Pattern):
    """S3M pattern: its packed size is stored in its own first 16 bits."""
    ROW = S3MRow
    def __init__(self, parent, name, size, desc=None):
        Pattern.__init__(self, parent, name, size, desc)

        # Get real_size from header (16-bit length prefix)
        addr = self.absolute_address
        size = self.stream.readBits(addr, 16, LITTLE_ENDIAN)
        self.setCheckedSizes(size)
+
class PTMPattern(Pattern):
    # PTM pattern: size comes from the chunk, not from a length prefix
    ROW = PTMRow
+
class Module(Parser):
    """Base parser for S3M/PTM modules, driven by a ChunkIndexer."""
    # MARKER / HEADER are defined in derived classes
    endian = LITTLE_ENDIAN

    def validate(self):
        # Byte at 0x1C must be the DOS EOF marker (0x1A), and the 4-byte
        # format marker at 0x2C must match the subclass MARKER.
        marker = self.stream.readBits(0x1C*8, 8, LITTLE_ENDIAN)
        if marker != 0x1A:
            return "Invalid start marker %u" % marker
        marker = self.stream.readBytes(0x2C*8, 4)
        if marker != self.MARKER:
            return "Invalid marker %s!=%s" % (marker, self.MARKER)
        return True

    def createFields(self):
        # Index chunks
        indexer = ChunkIndexer()
        # Add header - at least 0x50 bytes; the header field itself queues
        # instrument/pattern sub-chunks via getSubChunks()
        indexer.addChunk(Chunk(self.HEADER, "header", 0, 0x50))
        for field in indexer.yieldChunks(self):
            yield field
+
+
class S3MModule(Module):
    """ScreamTracker 3 (.s3m) module parser."""
    PARSER_TAGS = {
        "id": "s3m",
        "category": "audio",
        "file_ext": ("s3m",),
        "mime": (u'audio/s3m', u'audio/x-s3m'),
        "min_size": 64*8,
        "description": "ScreamTracker3 module"
    }
    MARKER = "SCRM"
    HEADER = S3MHeader

# TODO: unfinished content-size computation, kept for reference:
##    def createContentSize(self):
##        hdr = Header(self, "header")
##        max_offset = hdr._size//8

##        instr_size = Instrument._size//8
##        for index in xrange(self["header/num_instruments"].value):
##            offset = 16*hdr["instr_pptr/offset[%u]" % index].value
##            max_offset = max(offset+instr_size, max_offset)
##            addr = self.absolute_address + 8*offset
+
class PTMModule(Module):
    """PolyTracker (.ptm) module parser."""
    PARSER_TAGS = {
        "id": "ptm",
        "category": "audio",
        "file_ext": ("ptm",),
        "min_size": 64*8,
        "description": "PolyTracker module (v1.17)"
    }
    MARKER = "PTMF"
    HEADER = PTMHeader
diff --git a/lib/hachoir_parser/audio/xm.py b/lib/hachoir_parser/audio/xm.py
new file mode 100644
index 0000000000000000000000000000000000000000..0b13b41f209c5800919cb6f4320656fccdaee97e
--- /dev/null
+++ b/lib/hachoir_parser/audio/xm.py
@@ -0,0 +1,390 @@
+"""
+Parser of FastTrackerII Extended Module (XM) version 1.4
+
+Documents:
+- Modplug source code (file modplug/soundlib/Load_xm.cpp)
+  http://sourceforge.net/projects/modplug
- Dumb source code (files include/dumb.h and src/it/readxm.c)
  http://dumb.sf.net/
+- Documents of "XM" format on Wotsit
+  http://www.wotsit.org
+
+Author: Christophe GISQUET <christophe.gisquet@free.fr>
+Creation: 8th February 2007
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (StaticFieldSet, FieldSet,
+    Bit, RawBits, Bits,
+    UInt32, UInt16, UInt8, Int8, Enum,
+    RawBytes, String, GenericVector)
+from hachoir_core.endian import LITTLE_ENDIAN, BIG_ENDIAN
+from hachoir_core.text_handler import textHandler, filesizeHandler, hexadecimal
+from hachoir_parser.audio.modplug import ParseModplugMetadata
+from hachoir_parser.common.tracker import NOTE_NAME
+
def parseSigned(val):
    """Display a byte stored with a +128 bias as its signed value."""
    signed = val.value - 128
    return "%i" % signed
+
# From dumb
SEMITONE_BASE = 1.059463094359295309843105314939748495817  # 2**(1/12), frequency ratio of one semitone
PITCH_BASE = 1.000225659305069791926712241547647863626  # frequency ratio of one finetune step (per dumb)

# Display names for the 2-bit loop_mode field of SampleType
SAMPLE_LOOP_MODE = ("No loop", "Forward loop", "Ping-pong loop", "Undef")
+
class SampleType(FieldSet):
    """Sample type byte: 16-bit sample flag and 2-bit loop mode."""
    static_size = 8
    def createFields(self):
        yield Bits(self, "unused[]", 4)
        yield Bit(self, "16bits")
        yield Bits(self, "unused[]", 1)
        yield Enum(Bits(self, "loop_mode", 2), SAMPLE_LOOP_MODE)
+
class SampleHeader(FieldSet):
    """XM sample header (40 bytes): length and loop information, volume,
    tuning, panning and the 22-character sample name."""
    static_size = 40*8
    def createFields(self):
        yield UInt32(self, "length")
        yield UInt32(self, "loop_start")
        yield UInt32(self, "loop_end")
        yield UInt8(self, "volume")
        yield Int8(self, "fine_tune")
        yield SampleType(self, "type")
        yield UInt8(self, "panning")
        yield Int8(self, "relative_note")
        yield UInt8(self, "reserved")
        yield String(self, "name", 22, charset="ASCII", strip=' \0')

    def createValue(self):
        # Bytes per sample point: 1 for 8-bit samples, 2 for 16-bit.
        # (renamed from "bytes" to avoid shadowing the builtin)
        sample_bytes = 1 + self["type/16bits"].value
        # C-5 playback rate derived from relative note and finetune
        # (constants come from the dumb library, see module docstring)
        C5_speed = int(16726.0*pow(SEMITONE_BASE, self["relative_note"].value)
                       *pow(PITCH_BASE, self["fine_tune"].value*2))
        # Floor division made explicit ("length" is in bytes)
        return "%s, %ubits, %u samples, %uHz" % \
               (self["name"].display, 8*sample_bytes,
                self["length"].value//sample_bytes, C5_speed)
+
class StuffType(StaticFieldSet):
    """Envelope type flag byte: on/sustain/loop bits."""
    format = (
        (Bits, "unused", 5),
        (Bit, "loop"),
        (Bit, "sustain"),
        (Bit, "on")
    )
+
class InstrumentSecondHeader(FieldSet):
    """Second part of an XM instrument header (234 bytes): note-to-sample
    mapping, volume/panning envelopes and vibrato settings."""
    static_size = 234*8
    def createFields(self):
        yield UInt32(self, "sample_header_size")
        # One sample index per possible note
        yield GenericVector(self, "notes", 96, UInt8, "sample")
        # Envelopes: 12 (x,y) pairs stored as 24 16-bit words each
        yield GenericVector(self, "volume_envelope", 24, UInt16, "point")
        yield GenericVector(self, "panning_envelope", 24, UInt16, "point")
        yield UInt8(self, "volume_points", r"Number of volume points")
        yield UInt8(self, "panning_points", r"Number of panning points")
        yield UInt8(self, "volume_sustain_point")
        yield UInt8(self, "volume_loop_start_point")
        yield UInt8(self, "volume_loop_end_point")
        yield UInt8(self, "panning_sustain_point")
        yield UInt8(self, "panning_loop_start_point")
        yield UInt8(self, "panning_loop_end_point")
        yield StuffType(self, "volume_type")
        yield StuffType(self, "panning_type")
        yield UInt8(self, "vibrato_type")
        yield UInt8(self, "vibrato_sweep")
        yield UInt8(self, "vibrato_depth")
        yield UInt8(self, "vibrato_rate")
        yield UInt16(self, "volume_fadeout")
        yield GenericVector(self, "reserved", 11, UInt16, "word")
+
def createInstrumentContentSize(s, addr):
    """Compute the total bit size of an XM instrument at bit address
    ``addr``: full header (size given by its first 32-bit word) plus all
    sample headers and their raw sample data.

    s: field set giving access to the input stream
    addr: bit address of the instrument header
    Returns the size in bits.
    """
    start = addr
    # Sample count lives 27 bytes into the header (after size/name/type)
    samples = s.stream.readBits(addr+27*8, 16, LITTLE_ENDIAN)
    # Seek to end of header (1st + 2nd part)
    addr += 8*s.stream.readBits(addr, 32, LITTLE_ENDIAN)

    # Sum the data length of every sample; each sample header begins with
    # its 32-bit "length" field. (The redundant "if samples:" guard was
    # removed -- the loop simply does not run when samples == 0.)
    sample_size = 0
    for index in xrange(samples):
        # Read the sample size from the header
        sample_size += s.stream.readBits(addr, 32, LITTLE_ENDIAN)
        # Seek to next sample header
        addr += SampleHeader.static_size

    return addr - start + 8*sample_size
+
class Instrument(FieldSet):
    """XM instrument: first header, optional second header and sample
    headers, followed by the delta-coded sample data."""
    def __init__(self, parent, name):
        FieldSet.__init__(self, parent, name)
        # The size is not stored directly: walk the headers to compute it
        self._size = createInstrumentContentSize(self, self.absolute_address)
        self.info(self.createDescription())

    # Seems to fix things...
    def fixInstrumentHeader(self):
        # Skip whatever lies between the parsed fields and the header
        # size announced in the "size" field
        size = self["size"].value - self.current_size//8
        if size:
            yield RawBytes(self, "unknown_data", size)

    def createFields(self):
        yield UInt32(self, "size")
        yield String(self, "name", 22, charset="ASCII", strip=" \0")
        # Doc says type is always 0, but I've found values of 24 and 96 for
        # the _same_ song here, just different download sources for the file
        yield UInt8(self, "type")
        yield UInt16(self, "samples")
        num = self["samples"].value
        self.info(self.createDescription())

        if num:
            yield InstrumentSecondHeader(self, "second_header")

            for field in self.fixInstrumentHeader():
                yield field

            # This part probably wrong
            sample_size = [ ]
            for index in xrange(num):
                sample = SampleHeader(self, "sample_header[]")
                yield sample
                sample_size.append(sample["length"].value)

            # Sample data follows all sample headers, in the same order
            for size in sample_size:
                if size:
                    yield RawBytes(self, "sample_data[]", size, "Deltas")
        else:
            for field in self.fixInstrumentHeader():
                yield field

    def createDescription(self):
        return "Instrument '%s': %i samples, header %i bytes" % \
               (self["name"].value, self["samples"].value, self["size"].value)
+
# Names of the volume-column effects, indexed by (value//16 - 6) for
# values above the direct-volume range (see parseVolume)
VOLUME_NAME = (
    "Volume slide down", "Volume slide up", "Fine volume slide down",
    "Fine volume slide up", "Set vibrato speed", "Vibrato",
    "Set panning", "Panning slide left", "Panning slide right",
    "Tone porta", "Unhandled")
+
def parseVolume(val):
    """Display an XM volume-column byte.

    Values 0x10..0x50 set the volume directly (0..64); other values
    select a volume effect named in VOLUME_NAME.
    """
    val = val.value
    if 0x10 <= val <= 0x50:
        # Bug fix: was '"Volume %i" % val-16', which formats first and
        # then subtracts 16 from the *string*, raising TypeError
        return "Volume %i" % (val-16)
    else:
        # Floor division keeps an integer index (Python 3 safe too)
        return VOLUME_NAME[val//16 - 6]
+
class RealBit(RawBits):
    """Single bit whose value is read with big-endian bit order, so the
    most significant bit of the byte is seen first."""
    static_size = 1

    def __init__(self, parent, name, description=None):
        RawBits.__init__(self, parent, name, 1, description=description)

    def createValue(self):
        # Force BIG_ENDIAN here: the file is little-endian, but the note
        # flag bits must be read MSB-first (see Note.createFields)
        return self._parent.stream.readBits(self.absolute_address, 1, BIG_ENDIAN)
+
class NoteInfo(StaticFieldSet):
    """Packed-note flag bits: which of the five note attributes are
    actually stored in the stream."""
    format = (
        (RawBits, "unused", 2),
        (RealBit, "has_parameter"),
        (RealBit, "has_type"),
        (RealBit, "has_volume"),
        (RealBit, "has_instrument"),
        (RealBit, "has_note")
    )
+
# Names of the standard effect commands, indexed by effect number;
# None marks values handled specially in Effect.createValue (0x0E, 0x21)
EFFECT_NAME = (
    "Arppegio", "Porta up", "Porta down", "Tone porta", "Vibrato",
    "Tone porta+Volume slide", "Vibrato+Volume slide", "Tremolo",
    "Set panning", "Sample offset", "Volume slide", "Position jump",
    "Set volume", "Pattern break", None, "Set tempo/BPM",
    "Set global volume", "Global volume slide", "Unused", "Unused",
    "Unused", "Set envelope position", "Unused", "Unused",
    "Panning slide", "Unused", "Multi retrig note", "Unused",
    "Tremor", "Unused", "Unused", "Unused", None)

# Names of the extended (0x0E) sub-commands, indexed by the high nibble
# of the parameter byte.
# NOTE(review): only 14 entries while the nibble can reach 15, so a
# sub-command of 14 or 15 would raise IndexError -- confirm intended.
EFFECT_E_NAME = (
    "Unknown", "Fine porta up", "Fine porta down",
    "Set gliss control", "Set vibrato control", "Set finetune",
    "Set loop begin/loop", "Set tremolo control", "Retrig note",
    "Fine volume slide up", "Fine volume slide down", "Note cut",
    "Note delay", "Pattern delay")
+
class Effect(RawBits):
    """Effect column of a note: one byte whose display decodes the
    extended (0x0E) and extra-fine-porta (0x21) sub-commands using the
    parameter byte that follows it in the stream."""
    def __init__(self, parent, name):
        RawBits.__init__(self, parent, name, 8)

    def createValue(self):
        t = self.parent.stream.readBits(self.absolute_address, 8, LITTLE_ENDIAN)
        param = self.parent.stream.readBits(self.absolute_address+8, 8, LITTLE_ENDIAN)
        if t == 0x0E:
            # Exy: sub-command x in the high nibble, argument y in the
            # low nibble. Bug fix: the argument was masked with 0x07,
            # dropping bit 3 of the 4-bit nibble; use 0x0F.
            return EFFECT_E_NAME[param>>4] + " %i" % (param&0x0F)
        elif t == 0x21:
            return ("Extra fine porta up", "Extra fine porta down")[param>>4]
        else:
            return EFFECT_NAME[t]
+
class Note(FieldSet):
    """One note cell of a pattern row, stored either packed (flag byte
    followed by only the present attributes, when bit 7 of the first
    byte is set) or unpacked (5 fixed bytes)."""
    def __init__(self, parent, name, desc=None):
        FieldSet.__init__(self, parent, name, desc)
        # First byte: bit 7 = packed note, low bits flag which
        # attributes follow
        self.flags = self.stream.readBits(self.absolute_address, 8, LITTLE_ENDIAN)
        if self.flags&0x80:
            # TODO: optimize bitcounting with a table:
            # http://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetTable
            self._size = 8
            if self.flags&0x01: self._size += 8
            if self.flags&0x02: self._size += 8
            if self.flags&0x04: self._size += 8
            if self.flags&0x08: self._size += 8
            if self.flags&0x10: self._size += 8
        else:
            self._size = 5*8

    def createFields(self):
        # This stupid shit gets the LSB, not the MSB...
        self.info("Note info: 0x%02X" %
                  self.stream.readBits(self.absolute_address, 8, LITTLE_ENDIAN))
        yield RealBit(self, "is_extended")
        if self["is_extended"].value:
            info = NoteInfo(self, "info")
            yield info
            if info["has_note"].value:
                yield Enum(UInt8(self, "note"), NOTE_NAME)
            if info["has_instrument"].value:
                yield UInt8(self, "instrument")
            if info["has_volume"].value:
                yield textHandler(UInt8(self, "volume"), parseVolume)
            if info["has_type"].value:
                yield Effect(self, "effect_type")
            if info["has_parameter"].value:
                yield textHandler(UInt8(self, "effect_parameter"), hexadecimal)
        else:
            yield Enum(Bits(self, "note", 7), NOTE_NAME)
            yield UInt8(self, "instrument")
            yield textHandler(UInt8(self, "volume"), parseVolume)
            yield Effect(self, "effect_type")
            yield textHandler(UInt8(self, "effect_parameter"), hexadecimal)

    def createDescription(self):
        """Summarize the attributes actually present in this note."""
        if "info" in self:
            info = self["info"]
            desc = []
            if info["has_note"].value:
                desc.append(self["note"].display)
            if info["has_instrument"].value:
                desc.append("instrument %i" % self["instrument"].value)
            if info["has_volume"].value:
                # Bug fix: displayed self["has_volume"] -- a flag living
                # in the "info" subset, not a field of this note -- where
                # the "volume" field was clearly intended
                desc.append(self["volume"].display)
            if info["has_type"].value:
                desc.append("effect %s" % self["effect_type"].value)
            if info["has_parameter"].value:
                desc.append("parameter %i" % self["effect_parameter"].value)
        else:
            # Bug fix: same "has_volume" -> "volume" correction as above
            desc = (self["note"].display, "instrument %i" % self["instrument"].value,
                self["volume"].display, "effect %s" % self["effect_type"].value,
                "parameter %i" % self["effect_parameter"].value)
        if desc:
            return "Note %s" % ", ".join(desc)
        else:
            return "Note"
+
class Row(FieldSet):
    """One pattern row: one Note per channel (channel count comes from
    the file header)."""
    def createFields(self):
        for index in xrange(self["/header/channels"].value):
            yield Note(self, "note[]")
+
def createPatternContentSize(s, addr):
    """Return the bit size of a pattern at bit address ``addr``: header
    length (first 32-bit word) plus packed data size (16-bit word at
    byte offset 7), both stored in bytes."""
    return 8*(s.stream.readBits(addr, 32, LITTLE_ENDIAN) +
              s.stream.readBits(addr+7*8, 16, LITTLE_ENDIAN))
+
class Pattern(FieldSet):
    """XM pattern: 9-byte header followed by the packed rows of notes."""
    def __init__(self, parent, name, desc=None):
        FieldSet.__init__(self, parent, name, desc)
        # Total size (header + packed data) is read from the header
        self._size = createPatternContentSize(self, self.absolute_address)

    def createFields(self):
        yield UInt32(self, "header_size", r"Header length (9)")
        yield UInt8(self, "packing_type", r"Packing type (always 0)")
        yield UInt16(self, "rows", r"Number of rows in pattern (1..256)")
        yield UInt16(self, "data_size", r"Packed patterndata size")
        rows = self["rows"].value
        self.info("Pattern: %i rows" % rows)
        for index in xrange(rows):
            yield Row(self, "row[]")

    def createDescription(self):
        return "Pattern with %i rows" % self["rows"].value
+
class Header(FieldSet):
    """Fixed 336-byte XM file header: signature, title, tracker info,
    global song settings and the 256-byte pattern order table."""
    MAGIC = "Extended Module: "
    static_size = 336*8

    def createFields(self):
        yield String(self, "signature", 17, "XM signature", charset="ASCII")
        yield String(self, "title", 20, "XM title", charset="ASCII", strip=' ')
        yield UInt8(self, "marker", "Marker (0x1A)")
        yield String(self, "tracker_name", 20, "XM tracker name", charset="ASCII", strip=' ')
        yield UInt8(self, "format_minor")
        yield UInt8(self, "format_major")
        yield filesizeHandler(UInt32(self, "header_size", "Header size (276)"))
        yield UInt16(self, "song_length", "Length in patten order table")
        yield UInt16(self, "restart", "Restart position")
        yield UInt16(self, "channels", "Number of channels (2,4,6,8,10,...,32)")
        yield UInt16(self, "patterns", "Number of patterns (max 256)")
        yield UInt16(self, "instruments", "Number of instruments (max 128)")
        # Flags word split at the bit level
        # NOTE(review): assumes the frequency-table flag is the first bit
        # read here -- confirm against the XM "flags" word layout
        yield Bit(self, "amiga_ftable", "Amiga frequency table")
        yield Bit(self, "linear_ftable", "Linear frequency table")
        yield Bits(self, "unused", 14)
        yield UInt16(self, "tempo", "Default tempo")
        yield UInt16(self, "bpm", "Default BPM")
        yield GenericVector(self, "pattern_order", 256, UInt8, "order")

    def createDescription(self):
        return "'%s' by '%s'" % (
            self["title"].value, self["tracker_name"].value)
+
class XMModule(Parser):
    """FastTracker2 XM module parser: fixed header, then patterns,
    instruments and (optionally) ModPlug metadata."""
    PARSER_TAGS = {
        "id": "fasttracker2",
        "category": "audio",
        "file_ext": ("xm",),
        "mime": (
            u'audio/xm', u'audio/x-xm',
            u'audio/module-xm', u'audio/mod', u'audio/x-mod'),
        "magic": ((Header.MAGIC, 0),),
        "min_size": Header.static_size +29*8, # Header + 1 empty instrument
        "description": "FastTracker2 module"
    }
    endian = LITTLE_ENDIAN

    def validate(self):
        """Check the ASCII signature and the fixed header size (276).
        Returns True on success, or an error message string."""
        header = self.stream.readBytes(0, 17)
        if header != Header.MAGIC:
            return "Invalid signature '%s'" % header
        if self["/header/header_size"].value != 276:
            return "Unknown header size (%u)" % self["/header/header_size"].value
        return True

    def createFields(self):
        yield Header(self, "header")
        for index in xrange(self["/header/patterns"].value):
            yield Pattern(self, "pattern[]")
        for index in xrange(self["/header/instruments"].value):
            yield Instrument(self, "instrument[]")

        # Metadata added by ModPlug - can be discarded
        for field in ParseModplugMetadata(self):
            yield field

    def createContentSize(self):
        """Compute the content size in bits by walking pattern and
        instrument headers from their stored sizes (all addresses here
        are bit offsets)."""
        # Header size
        size = Header.static_size

        # Add patterns size
        for index in xrange(self["/header/patterns"].value):
            size += createPatternContentSize(self, size)

        # Add instruments size
        for index in xrange(self["/header/instruments"].value):
            size += createInstrumentContentSize(self, size)

        # Not reporting Modplug metadata
        return size

    def createDescription(self):
        return self["header"].description
+
diff --git a/lib/hachoir_parser/common/__init__.py b/lib/hachoir_parser/common/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/lib/hachoir_parser/common/deflate.py b/lib/hachoir_parser/common/deflate.py
new file mode 100644
index 0000000000000000000000000000000000000000..df9c2e0a021d8efb87d4013eca07ce4baad07729
--- /dev/null
+++ b/lib/hachoir_parser/common/deflate.py
@@ -0,0 +1,33 @@
+from hachoir_core.field import CompressedField
+
try:
    from zlib import decompressobj, MAX_WBITS

    class DeflateStream:
        """Stateful decompressor wrapping zlib's decompressobj.

        With wbits true, a raw deflate stream (no zlib header) is
        expected; otherwise the standard zlib format is used.
        """
        def __init__(self, stream, wbits=None):
            # "stream" is unused: decompression state lives in the zlib
            # object and data is pushed through __call__
            if wbits:
                self.gzip = decompressobj(-MAX_WBITS)
            else:
                self.gzip = decompressobj()

        def __call__(self, size, data=None):
            # With no new data, keep consuming the tail left over from
            # the previous size-limited decompress() call
            if data is None:
                data = self.gzip.unconsumed_tail
            return self.gzip.decompress(data, size)

    class DeflateStreamWbits(DeflateStream):
        """DeflateStream preconfigured for raw deflate data."""
        def __init__(self, stream):
            DeflateStream.__init__(self, stream, True)

    def Deflate(field, wbits=True):
        """Attach transparent deflate decompression to a field and
        return the field."""
        if wbits:
            CompressedField(field, DeflateStreamWbits)
        else:
            CompressedField(field, DeflateStream)
        return field
    has_deflate = True
except ImportError:
    # zlib unavailable: leave fields compressed (no-op fallback)
    def Deflate(field, wbits=True):
        return field
    has_deflate = False
+
diff --git a/lib/hachoir_parser/common/msdos.py b/lib/hachoir_parser/common/msdos.py
new file mode 100644
index 0000000000000000000000000000000000000000..addd149596171dc23a2e580310fddf0771026a4a
--- /dev/null
+++ b/lib/hachoir_parser/common/msdos.py
@@ -0,0 +1,62 @@
+"""
+MS-DOS structures.
+
+Documentation:
+- File attributes:
+  http://www.cs.colorado.edu/~main/cs1300/include/ddk/winddk.h
+"""
+
+from hachoir_core.field import StaticFieldSet
+from hachoir_core.field import Bit, NullBits
+
# File-attribute flag bits shared by the 16-bit and 32-bit layouts
# (see winddk.h reference in the module docstring)
_FIELDS = (
    (Bit, "read_only"),
    (Bit, "hidden"),
    (Bit, "system"),
    (NullBits, "reserved[]", 1),
    (Bit, "directory"),
    (Bit, "archive"),
    (Bit, "device"),
    (Bit, "normal"),
    (Bit, "temporary"),
    (Bit, "sparse_file"),
    (Bit, "reparse_file"),
    (Bit, "compressed"),
    (Bit, "offline"),
    (Bit, "dont_index_content"),
    (Bit, "encrypted"),
)
+
class MSDOSFileAttr16(StaticFieldSet):
    """MS-DOS 16-bit file attribute flags."""
    format = _FIELDS + ((NullBits, "reserved[]", 1),)

    _text_keys = (
        # Sort attributes by importance
        "directory", "read_only", "compressed",
        "hidden", "system",
        "normal", "device",
        "temporary", "archive")

    def createValue(self):
        # Collect the names of the attributes that are set, most
        # important first, and truncate the display after four of them
        active = [name for name in self._text_keys if self[name].value]
        if len(active) > 4:
            active = active[:4] + ["..."]
        if active:
            return ", ".join(active)
        return "(none)"
+
class MSDOSFileAttr32(MSDOSFileAttr16):
    """
    MSDOS 32-bit file attributes
    """
    # Same flag bits as the 16-bit variant, padded to 32 bits
    format = _FIELDS + ((NullBits, "reserved[]", 17),)
+
diff --git a/lib/hachoir_parser/common/tracker.py b/lib/hachoir_parser/common/tracker.py
new file mode 100644
index 0000000000000000000000000000000000000000..27a38e899927fa279881319e0462e40e32f1098c
--- /dev/null
+++ b/lib/hachoir_parser/common/tracker.py
@@ -0,0 +1,10 @@
+"""
+Shared code for tracker parser.
+"""
+
# Map note number (octave*12 + semitone index) -> display name.
# Bug fix: the semitone table contained "G#" twice (13 entries for a
# 12-semitone octave), which shifted A/A#/B by one and made the last
# entry of each octave collide with the first entry of the next one.
NOTE_NAME = {}
NOTES = ("C", "C#", "D", "D#", "E", "F", "F#", "G", "G#", "A", "A#", "B")
for octave in range(10):
    for index, note in enumerate(NOTES):
        NOTE_NAME[octave*12+index] = "%s (octave %s)" % (note, octave)
+
diff --git a/lib/hachoir_parser/common/win32.py b/lib/hachoir_parser/common/win32.py
new file mode 100644
index 0000000000000000000000000000000000000000..177190ec0d9f3f73d307c3fbcdabbc8ee829672b
--- /dev/null
+++ b/lib/hachoir_parser/common/win32.py
@@ -0,0 +1,154 @@
+from hachoir_core.field import (FieldSet,
+    UInt16, UInt32, Enum, String, Bytes, Bits, TimestampUUID60)
+from hachoir_parser.video.fourcc import video_fourcc_name
+from hachoir_core.bits import str2hex
+from hachoir_core.text_handler import textHandler, hexadecimal
+from hachoir_parser.network.common import MAC48_Address
+
# Dictionary: Windows codepage => Python charset name
# (codepages listed only in comments have no Python codec mapping here)
CODEPAGE_CHARSET = {
      874: "CP874",
#     932: Japanese Shift-JIS
#     936: Simplified Chinese GBK
#     949: Korean
#     950: Traditional Chinese Big5
     1250: "WINDOWS-1250",
     1251: "WINDOWS-1251",
     1252: "WINDOWS-1252",
     1253: "WINDOWS-1253",
     1254: "WINDOWS-1254",
     1255: "WINDOWS-1255",
     1256: "WINDOWS-1256",
     1257: "WINDOWS-1257",
     1258: "WINDOWS-1258",
    65001: "UTF-8",
}
+
class PascalStringWin32(FieldSet):
    """Windows Pascal-style string: a 32-bit length (in widechars)
    followed by the UTF-16 text. Value is the text, or None if empty."""
    def __init__(self, parent, name, description=None, strip=None, charset="UTF-16-LE"):
        FieldSet.__init__(self, parent, name, description)
        # Reading "length" here fixes the total field size up front
        length = self["length"].value
        self._size = 32 + length * 16
        self.strip = strip
        self.charset = charset

    def createFields(self):
        yield UInt32(self, "length", "Length in widechar characters")
        size = self["length"].value
        if size:
            # Two bytes per widechar character
            yield String(self, "text", size*2, charset=self.charset, strip=self.strip)

    def createValue(self):
        if "text" in self:
            return self["text"].value
        else:
            return None
+
class GUID(FieldSet):
    """
    Windows 128 bits Globally Unique Identifier (GUID)

    See RFC 4122
    """
    static_size = 128
    NULL = "00000000-0000-0000-0000-000000000000"
    # Version-specific names for the two opaque data fields
    FIELD_NAMES = {
        3: ("sha1_high", "sha1_low"),
        4: ("random_high", "random_low"),
        5: ("md5_high", "md5_low"),
    }
    VERSION_NAME = {
        1: "Timestamp & MAC-48",
        2: "DCE Security version",
        3: "Name SHA-1 hash",
        4: "Randomly generated",
        5: "Name MD5 hash",
    }
    VARIANT_NAME = {
        0: "NCS",
        2: "Leach-Salz",
       # 5: Microsoft Corporation?
        6: "Microsoft Corporation",
        7: "Reserved Future",
    }
    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        # UUID version: the 4 bits following time_low (32) + time_mid (16)
        # + the first 12 bits of time_hi_and_version
        self.version = self.stream.readBits(self.absolute_address + 32 + 16 + 12, 4, self.endian)

    def createFields(self):
        if self.version == 1:
            # Time-based UUID: timestamp, clock sequence and MAC address.
            # (Dead code removed: a nested "if self.version == 1" check
            # was always true here, making its else branch -- a plain
            # 6-byte "node" field -- unreachable.)
            yield TimestampUUID60(self, "time")
            yield Enum(Bits(self, "version", 4), self.VERSION_NAME)
            yield Enum(Bits(self, "variant", 3), self.VARIANT_NAME)
            yield textHandler(Bits(self, "clock", 13), hexadecimal)
            yield MAC48_Address(self, "mac", "IEEE 802 MAC address")
        else:
            # Other versions: two opaque blobs around version/variant
            namea, nameb = self.FIELD_NAMES.get(
                self.version, ("data_a", "data_b"))
            yield textHandler(Bits(self, namea, 60), hexadecimal)
            yield Enum(Bits(self, "version", 4), self.VERSION_NAME)
            yield Enum(Bits(self, "variant", 3), self.VARIANT_NAME)
            yield textHandler(Bits(self, nameb, 61), hexadecimal)

    def createValue(self):
        """Canonical 8-4-4-4-12 hexadecimal representation."""
        addr = self.absolute_address
        a = self.stream.readBits (addr,      32, self.endian)
        b = self.stream.readBits (addr + 32, 16, self.endian)
        c = self.stream.readBits (addr + 48, 16, self.endian)
        d = self.stream.readBytes(addr + 64, 2)
        e = self.stream.readBytes(addr + 80, 6)
        return "%08X-%04X-%04X-%s-%s" % (a, b, c, str2hex(d), str2hex(e))

    def createDisplay(self):
        value = self.value
        if value == self.NULL:
            name = "Null GUID: "
        else:
            name = "GUID v%u (%s): " % (self.version, self["version"].display)
        return name + value

    def createRawDisplay(self):
        value = self.stream.readBytes(self.absolute_address, 16)
        return str2hex(value, format=r"\x%02x")
+
class BitmapInfoHeader(FieldSet):
    """ Win32 BITMAPINFOHEADER structure from GDI """
    static_size = 40*8

    COMPRESSION_NAME = {
        0: u"Uncompressed (RGB)",
        1: u"RLE (8 bits)",
        2: u"RLE (4 bits)",
        3: u"Bitfields",
        4: u"JPEG",
        5: u"PNG"
    }

    def __init__(self, parent, name, use_fourcc=False):
        """With use_fourcc, the compression field is parsed as a video
        FOURCC string instead of a GDI compression code."""
        FieldSet.__init__(self, parent, name)
        self._use_fourcc = use_fourcc

    def createFields(self):
        yield UInt32(self, "hdr_size", "Header size (in bytes) (=40)")
        yield UInt32(self, "width", "Width")
        yield UInt32(self, "height", "Height")
        yield UInt16(self, "nb_planes", "Color planes")
        yield UInt16(self, "bpp", "Bits/pixel")
        if self._use_fourcc:
            yield Enum(String(self, "codec", 4, charset="ASCII"), video_fourcc_name)
        else:
            yield Enum(UInt32(self, "codec", "Compression"), self.COMPRESSION_NAME)
        yield UInt32(self, "size", "Image size (in bytes)")
        yield UInt32(self, "xres", "X pixels per meter")
        yield UInt32(self, "yres", "Y pixels per meter")
        yield UInt32(self, "color_used", "Number of used colors")
        yield UInt32(self, "color_important", "Number of important colors")

    def createDescription(self):
        return "Bitmap info header: %ux%u pixels, %u bits/pixel" % \
            (self["width"].value, self["height"].value, self["bpp"].value)
+
diff --git a/lib/hachoir_parser/common/win32_lang_id.py b/lib/hachoir_parser/common/win32_lang_id.py
new file mode 100644
index 0000000000000000000000000000000000000000..a5da66f874ad75d323beb18ba6a8a4c057ace7b1
--- /dev/null
+++ b/lib/hachoir_parser/common/win32_lang_id.py
@@ -0,0 +1,136 @@
+"""
+Windows 2000 - List of Locale IDs and Language Groups
+
+Original data table:
+http://www.microsoft.com/globaldev/reference/win2k/setup/lcid.mspx
+"""
+
# Dictionary: Windows locale identifier (LCID) => language name
# (see the Microsoft reference in the module docstring)
LANGUAGE_ID = {
    0x0436: u"Afrikaans",
    0x041c: u"Albanian",
    0x0401: u"Arabic Saudi Arabia",
    0x0801: u"Arabic Iraq",
    0x0c01: u"Arabic Egypt",
    0x1001: u"Arabic Libya",
    0x1401: u"Arabic Algeria",
    0x1801: u"Arabic Morocco",
    0x1c01: u"Arabic Tunisia",
    0x2001: u"Arabic Oman",
    0x2401: u"Arabic Yemen",
    0x2801: u"Arabic Syria",
    0x2c01: u"Arabic Jordan",
    0x3001: u"Arabic Lebanon",
    0x3401: u"Arabic Kuwait",
    0x3801: u"Arabic UAE",
    0x3c01: u"Arabic Bahrain",
    0x4001: u"Arabic Qatar",
    0x042b: u"Armenian",
    0x042c: u"Azeri Latin",
    0x082c: u"Azeri Cyrillic",
    0x042d: u"Basque",
    0x0423: u"Belarusian",
    0x0402: u"Bulgarian",
    0x0403: u"Catalan",
    0x0404: u"Chinese Taiwan",
    0x0804: u"Chinese PRC",
    0x0c04: u"Chinese Hong Kong",
    0x1004: u"Chinese Singapore",
    0x1404: u"Chinese Macau",
    0x041a: u"Croatian",
    0x0405: u"Czech",
    0x0406: u"Danish",
    0x0413: u"Dutch Standard",
    0x0813: u"Dutch Belgian",
    0x0409: u"English United States",
    0x0809: u"English United Kingdom",
    0x0c09: u"English Australian",
    0x1009: u"English Canadian",
    0x1409: u"English New Zealand",
    0x1809: u"English Irish",
    0x1c09: u"English South Africa",
    0x2009: u"English Jamaica",
    0x2409: u"English Caribbean",
    0x2809: u"English Belize",
    0x2c09: u"English Trinidad",
    0x3009: u"English Zimbabwe",
    0x3409: u"English Philippines",
    0x0425: u"Estonian",
    0x0438: u"Faeroese",
    0x0429: u"Farsi",
    0x040b: u"Finnish",
    0x040c: u"French Standard",
    0x080c: u"French Belgian",
    0x0c0c: u"French Canadian",
    0x100c: u"French Swiss",
    0x140c: u"French Luxembourg",
    0x180c: u"French Monaco",
    0x0437: u"Georgian",
    0x0407: u"German Standard",
    0x0807: u"German Swiss",
    0x0c07: u"German Austrian",
    0x1007: u"German Luxembourg",
    0x1407: u"German Liechtenstein",
    0x0408: u"Greek",
    0x040d: u"Hebrew",
    0x0439: u"Hindi",
    0x040e: u"Hungarian",
    0x040f: u"Icelandic",
    0x0421: u"Indonesian",
    0x0410: u"Italian Standard",
    0x0810: u"Italian Swiss",
    0x0411: u"Japanese",
    0x043f: u"Kazakh",
    0x0457: u"Konkani",
    0x0412: u"Korean",
    0x0426: u"Latvian",
    0x0427: u"Lithuanian",
    0x042f: u"Macedonian",
    0x043e: u"Malay Malaysia",
    0x083e: u"Malay Brunei Darussalam",
    0x044e: u"Marathi",
    0x0414: u"Norwegian Bokmal",
    0x0814: u"Norwegian Nynorsk",
    0x0415: u"Polish",
    0x0416: u"Portuguese Brazilian",
    0x0816: u"Portuguese Standard",
    0x0418: u"Romanian",
    0x0419: u"Russian",
    0x044f: u"Sanskrit",
    0x081a: u"Serbian Latin",
    0x0c1a: u"Serbian Cyrillic",
    0x041b: u"Slovak",
    0x0424: u"Slovenian",
    0x040a: u"Spanish Traditional Sort",
    0x080a: u"Spanish Mexican",
    0x0c0a: u"Spanish Modern Sort",
    0x100a: u"Spanish Guatemala",
    0x140a: u"Spanish Costa Rica",
    0x180a: u"Spanish Panama",
    0x1c0a: u"Spanish Dominican Republic",
    0x200a: u"Spanish Venezuela",
    0x240a: u"Spanish Colombia",
    0x280a: u"Spanish Peru",
    0x2c0a: u"Spanish Argentina",
    0x300a: u"Spanish Ecuador",
    0x340a: u"Spanish Chile",
    0x380a: u"Spanish Uruguay",
    0x3c0a: u"Spanish Paraguay",
    0x400a: u"Spanish Bolivia",
    0x440a: u"Spanish El Salvador",
    0x480a: u"Spanish Honduras",
    0x4c0a: u"Spanish Nicaragua",
    0x500a: u"Spanish Puerto Rico",
    0x0441: u"Swahili",
    0x041d: u"Swedish",
    0x081d: u"Swedish Finland",
    0x0449: u"Tamil",
    0x0444: u"Tatar",
    0x041e: u"Thai",
    0x041f: u"Turkish",
    0x0422: u"Ukrainian",
    0x0420: u"Urdu",
    0x0443: u"Uzbek Latin",
    0x0843: u"Uzbek Cyrillic",
    0x042a: u"Vietnamese",
}
+
diff --git a/lib/hachoir_parser/container/__init__.py b/lib/hachoir_parser/container/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6fd7d3edae2156a4cd43e5c3dbadd7bb392d47dd
--- /dev/null
+++ b/lib/hachoir_parser/container/__init__.py
@@ -0,0 +1,7 @@
+from hachoir_parser.container.asn1 import ASN1File
+from hachoir_parser.container.mkv import MkvFile
+from hachoir_parser.container.ogg import OggFile, OggStream
+from hachoir_parser.container.riff import RiffFile
+from hachoir_parser.container.swf import SwfFile
+from hachoir_parser.container.realmedia import RealMediaFile
+
diff --git a/lib/hachoir_parser/container/action_script.py b/lib/hachoir_parser/container/action_script.py
new file mode 100644
index 0000000000000000000000000000000000000000..6769ac114ea4f988c62a858dc06f037dbe784835
--- /dev/null
+++ b/lib/hachoir_parser/container/action_script.py
@@ -0,0 +1,316 @@
+"""
+SWF (Macromedia/Adobe Flash) file parser.
+
+Documentation:
+
+ - Alexis' SWF Reference:
+   http://www.m2osw.com/swf_alexref.html
+
+Author: Sebastien Ponce
+Creation date: 26 April 2008
+"""
+
+from hachoir_core.field import (FieldSet, ParserError,
+    Bit, Bits, UInt8, UInt32, Int16, UInt16, Float32, CString,
+    RawBytes)
+#from hachoir_core.field import Field
+from hachoir_core.field.float import FloatExponent
+from struct import unpack
+
class FlashFloat64(FieldSet):
    """SWF "double" immediate value: an IEEE 754 double stored with its two
    32-bit words swapped. The bit fields below only document the layout;
    the value is decoded from the re-ordered raw bytes."""
    def createFields(self):
        yield Bits(self, "mantisa_high", 20)
        yield FloatExponent(self, "exponent", 11)
        yield Bit(self, "negative")
        yield Bits(self, "mantisa_low", 32)

    def createValue(self):
        # Manual computation:
        # mantisa = mantisa_high * 2^32 + mantisa_low
        # float = 2^exponent + (1 + mantisa / 2^52)
        # (and float is negative if negative=True)
        bytes = self.parent.stream.readBytes(
            self.absolute_address, self.size//8)
        # Mix bytes: xxxxyyyy <=> yyyyxxxx
        bytes = bytes[4:8] + bytes[0:4]
        return unpack('<d', bytes)[0]
+
# Push_Data operand types: type code -> (field class, field name).
# A None field class means the type carries no payload bytes (null, undefined).
TYPE_INFO = {
    0x00: (CString, "Cstring[]"),
    0x01: (Float32, "Float[]"),
    0x02: (None, "Null[]"),
    0x03: (None, "Undefined[]"),
    0x04: (UInt8, "Register[]"),
    0x05: (UInt8, "Boolean[]"),
    0x06: (FlashFloat64, "Double[]"),
    0x07: (UInt32, "Integer[]"),
    0x08: (UInt8, "Dictionnary_Lookup_Index[]"),
    0x09: (UInt16, "Large_Dictionnary_Lookup_Index[]"),
}
+
def parseBranch(parent, size):
    # Signed 16-bit jump offset in bytes.
    yield Int16(parent, "offset")

def parseDeclareFunction(parent, size):
    # Function name, argument names, then the byte length of the function
    # body (the body itself follows as regular actions).
    yield CString(parent, "name")
    argCount = UInt16(parent, "arg_count")
    yield argCount
    for i in range(argCount.value):
        yield CString(parent, "arg[]")
    yield UInt16(parent, "function_length")

def parseDeclareFunctionV7(parent, size):
    # SWF 7 variant: adds a register count, preload/suppress flag bits,
    # and a register number bound to each declared argument.
    yield CString(parent, "name")
    argCount = UInt16(parent, "arg_count")
    yield argCount
    yield UInt8(parent, "reg_count")
    # Flags selecting which implicit objects (this, arguments, super, ...)
    # are preloaded into registers or suppressed entirely.
    yield Bits(parent, "reserved", 7)
    yield Bit(parent, "preload_global")
    yield Bit(parent, "preload_parent")
    yield Bit(parent, "preload_root")
    yield Bit(parent, "suppress_super")
    yield Bit(parent, "preload_super")
    yield Bit(parent, "suppress_arguments")
    yield Bit(parent, "preload_arguments")
    yield Bit(parent, "suppress_this")
    yield Bit(parent, "preload_this")
    for i in range(argCount.value):
        yield UInt8(parent, "register[]")
        yield CString(parent, "arg[]")
    yield UInt16(parent, "function_length")
+
def parseTry(parent, size):
    """Parse the body of an ActionTry (0x8F) action.

    Layout: a flags byte, three block sizes, then the catch target --
    either a variable name or a register number, selected by the
    catch_in_register flag.
    """
    yield Bits(parent, "reserved", 5)
    catchInReg = Bit(parent, "catch_in_register")
    yield catchInReg
    yield Bit(parent, "finally")
    yield Bit(parent, "catch")
    # NOTE(review): the SWF specification declares TrySize/CatchSize/
    # FinallySize as UI16; kept as single bytes here to avoid changing
    # the layout without a sample file -- TODO confirm against real data.
    yield UInt8(parent, "try_size")
    yield UInt8(parent, "catch_size")
    yield UInt8(parent, "finally_size")
    # Fixed: per the SWF spec, when catch_in_register is SET the catch
    # target is a register number (UI8); when CLEAR it is a variable
    # name (STRING). The original code had these two branches swapped.
    if catchInReg.value:
        yield UInt8(parent, "register")
    else:
        yield CString(parent, "name")
+
def parsePushData(parent, size):
    """Parse the operands of a Push_Data action: a run of
    (type code, value) pairs lasting until the end of the action."""
    while not parent.eof:
        type_field = UInt8(parent, "data_type[]")
        yield type_field
        try:
            value_class, value_name = TYPE_INFO[type_field.value]
        except KeyError:
            raise ParserError("Unknown type in Push_Data : "
                + hex(type_field.value))
        # Payload-less types (null, undefined) yield no value field.
        if value_class:
            yield value_class(parent, value_name)
+
def parseSetTarget(parent, size):
    # Path of the movie clip that becomes the current action target.
    yield CString(parent, "target")

def parseWith(parent, size):
    # Byte length of the actions covered by the With scope.
    yield UInt16(parent, "size")

def parseGetURL(parent, size):
    yield CString(parent, "url")
    yield CString(parent, "target")

def parseGetURL2(parent, size):
    # Send method / load-target flags packed into one byte.
    yield UInt8(parent, "method")

def parseGotoExpression(parent, size):
    # Non-zero: start playing after the jump.
    yield UInt8(parent, "play")

def parseGotoFrame(parent, size):
    yield UInt16(parent, "frame_no")

def parseGotoLabel(parent, size):
    yield CString(parent, "label")

def parseWaitForFrame(parent, size):
    # Frame to wait for, and how many of the following actions to skip
    # while it is not loaded yet.
    yield UInt16(parent, "frame")
    yield UInt8(parent, "skip")

def parseWaitForFrameDyn(parent, size):
    # Dynamic form: the frame number comes from the stack, only the
    # skip count is stored.
    yield UInt8(parent, "skip")

def parseDeclareDictionnary(parent, size):
    # Constant pool: a count followed by that many strings.
    count = UInt16(parent, "count")
    yield count
    for i in range(count.value):
        yield CString(parent, "dictionnary[]")

def parseStoreRegister(parent, size):
    yield UInt8(parent, "register")

def parseStrictMode(parent, size):
    yield UInt8(parent, "strict")
+
class Instruction(FieldSet):
    """One ActionScript (AVM1) action.

    Opcodes below 0x80 are a single byte; opcodes with the high bit set
    carry a 16-bit payload length followed by a payload parsed by the
    optional parser from ACTION_INFO.
    """
    # opcode -> (field name, description, payload parser or None)
    ACTION_INFO = {
        0x00: ("end[]", "End", None),
        0x99: ("Branch_Always[]", "Branch Always", parseBranch),
        0x9D: ("Branch_If_True[]", "Branch If True", parseBranch),
        0x3D: ("Call_Function[]", "Call Function", None),
        0x52: ("Call_Method[]", "Call Method", None),
        0x9B: ("Declare_Function[]", "Declare Function", parseDeclareFunction),
        0x8E: ("Declare_Function_V7[]", "Declare Function (V7)", parseDeclareFunctionV7),
        0x3E: ("Return[]", "Return", None),
        0x2A: ("Throw[]", "Throw", None),
        0x8F: ("Try[]", "Try", parseTry),
        # Stack Control
        0x4C: ("Duplicate[]", "Duplicate", None),
        0x96: ("Push_Data[]", "Push Data", parsePushData),
        0x4D: ("Swap[]", "Swap", None),
        # Action Script Context
        0x8B: ("Set_Target[]", "Set Target", parseSetTarget),
        0x20: ("Set_Target_dynamic[]", "Set Target (dynamic)", None),
        0x94: ("With[]", "With", parseWith),
        # Movie Control
        0x9E: ("Call_Frame[]", "Call Frame", None),
        0x83: ("Get_URL[]", "Get URL", parseGetURL),
        0x9A: ("Get_URL2[]", "Get URL2", parseGetURL2),
        0x9F: ("Goto_Expression[]", "Goto Expression", parseGotoExpression),
        0x81: ("Goto_Frame[]", "Goto Frame", parseGotoFrame),
        0x8C: ("Goto_Label[]", "Goto Label", parseGotoLabel),
        0x04: ("Next_Frame[]", "Next Frame", None),
        0x06: ("Play[]", "Play", None),
        0x05: ("Previous_Frame[]", "Previous Frame", None),
        0x07: ("Stop[]", "Stop", None),
        0x08: ("Toggle_Quality[]", "Toggle Quality", None),
        0x8A: ("Wait_For_Frame[]", "Wait For Frame", parseWaitForFrame),
        0x8D: ("Wait_For_Frame_dynamic[]", "Wait For Frame (dynamic)", parseWaitForFrameDyn),
        # Sound
        0x09: ("Stop_Sound[]", "Stop Sound", None),
        # Arithmetic
        0x0A: ("Add[]", "Add", None),
        0x47: ("Add_typed[]", "Add (typed)", None),
        0x51: ("Decrement[]", "Decrement", None),
        0x0D: ("Divide[]", "Divide", None),
        0x50: ("Increment[]", "Increment", None),
        0x18: ("Integral_Part[]", "Integral Part", None),
        0x3F: ("Modulo[]", "Modulo", None),
        0x0C: ("Multiply[]", "Multiply", None),
        0x4A: ("Number[]", "Number", None),
        0x0B: ("Subtract[]", "Subtract", None),
        # Comparisons
        0x0E: ("Equal[]", "Equal", None),
        0x49: ("Equal_typed[]", "Equal (typed)", None),
        0x66: ("Strict_Equal[]", "Strict Equal", None),
        0x67: ("Greater_Than_typed[]", "Greater Than (typed)", None),
        0x0F: ("Less_Than[]", "Less Than", None),
        0x48: ("Less_Than_typed[]", "Less Than (typed)", None),
        0x13: ("String_Equal[]", "String Equal", None),
        0x68: ("String_Greater_Than[]", "String Greater Than", None),
        0x29: ("String_Less_Than[]", "String Less Than", None),
        # Logical and Bit Wise
        0x60: ("And[]", "And", None),
        0x10: ("Logical_And[]", "Logical And", None),
        0x12: ("Logical_Not[]", "Logical Not", None),
        0x11: ("Logical_Or[]", "Logical Or", None),
        0x61: ("Or[]", "Or", None),
        0x63: ("Shift_Left[]", "Shift Left", None),
        0x64: ("Shift_Right[]", "Shift Right", None),
        0x65: ("Shift_Right_Unsigned[]", "Shift Right Unsigned", None),
        0x62: ("Xor[]", "Xor", None),
        # Strings & Characters (See the String Object also)
        0x33: ("Chr[]", "Chr", None),
        0x37: ("Chr_multi-bytes[]", "Chr (multi-bytes)", None),
        0x21: ("Concatenate_Strings[]", "Concatenate Strings", None),
        0x32: ("Ord[]", "Ord", None),
        0x36: ("Ord_multi-bytes[]", "Ord (multi-bytes)", None),
        0x4B: ("String[]", "String", None),
        0x14: ("String_Length[]", "String Length", None),
        0x31: ("String_Length_multi-bytes[]", "String Length (multi-bytes)", None),
        0x15: ("SubString[]", "SubString", None),
        0x35: ("SubString_multi-bytes[]", "SubString (multi-bytes)", None),
        # Properties
        0x22: ("Get_Property[]", "Get Property", None),
        0x23: ("Set_Property[]", "Set Property", None),
        # Objects
        0x2B: ("Cast_Object[]", "Cast Object", None),
        0x42: ("Declare_Array[]", "Declare Array", None),
        0x88: ("Declare_Dictionary[]", "Declare Dictionary", parseDeclareDictionnary),
        0x43: ("Declare_Object[]", "Declare Object", None),
        0x3A: ("Delete[]", "Delete", None),
        0x3B: ("Delete_All[]", "Delete All", None),
        0x24: ("Duplicate_Sprite[]", "Duplicate Sprite", None),
        0x46: ("Enumerate[]", "Enumerate", None),
        0x55: ("Enumerate_Object[]", "Enumerate Object", None),
        0x69: ("Extends[]", "Extends", None),
        0x4E: ("Get_Member[]", "Get Member", None),
        0x45: ("Get_Target[]", "Get Target", None),
        0x2C: ("Implements[]", "Implements", None),
        0x54: ("Instance_Of[]", "Instance Of", None),
        0x40: ("New[]", "New", None),
        0x53: ("New_Method[]", "New Method", None),
        0x25: ("Remove_Sprite[]", "Remove Sprite", None),
        0x4F: ("Set_Member[]", "Set Member", None),
        0x44: ("Type_Of[]", "Type Of", None),
        # Variables
        0x41: ("Declare_Local_Variable[]", "Declare Local Variable", None),
        0x1C: ("Get_Variable[]", "Get Variable", None),
        0x3C: ("Set_Local_Variable[]", "Set Local Variable", None),
        0x1D: ("Set_Variable[]", "Set Variable", None),
        # Miscellaneous
        0x2D: ("FSCommand2[]", "FSCommand2", None),
        0x34: ("Get_Timer[]", "Get Timer", None),
        0x30: ("Random[]", "Random", None),
        0x27: ("Start_Drag[]", "Start Drag", None),
        0x28: ("Stop_Drag[]", "Stop Drag", None),
        0x87: ("Store_Register[]", "Store Register", parseStoreRegister),
        0x89: ("Strict_Mode[]", "Strict Mode", parseStrictMode),
        0x26: ("Trace[]", "Trace", None),
    }

    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        code = self["action_id"].value
        if code & 128:
            # Long form: 1 opcode byte + 2 length bytes + payload.
            self._size = (3 + self["action_length"].value) * 8
        else:
            # Short form: the opcode byte is the whole action.
            self._size = 8
        if code in self.ACTION_INFO:
            self._name, self._description, self.parser = self.ACTION_INFO[code]
        else:
            # Unknown opcode: payload (if any) is dumped as raw bytes.
            # NOTE(review): _description stays unset on this path, so
            # createDescription() would raise for unknown opcodes --
            # confirm whether such opcodes occur in real files.
            self.parser = None

    def createFields(self):
        yield Bits(self, "action_id", 8)
        # Short-form actions stop after the opcode byte.
        if not (self["action_id"].value & 128):
            return
        yield UInt16(self, "action_length")
        size = self["action_length"].value
        if not size:
            return
        if self.parser:
            for field in self.parser(self, size):
                yield field
        else:
            yield RawBytes(self, "action_data", size)

    def createDescription(self):
        return self._description

    def __str__(self):
        # Multi-line dump: description, then "offset name=value" for each
        # operand field (bookkeeping fields are filtered out).
        r = str(self._description)
        for f in self:
            if f.name not in ("action_id", "action_length", "count") and not f.name.startswith("data_type") :
                r = r + "\n   " + str((self.address+f.address)/8) + " " + str(f.name) + "=" + str(f.value)
        return r
+
class ActionScript(FieldSet):
    """Sequence of AVM1 actions filling the whole field set."""

    def createFields(self):
        # Parse instructions until the byte budget is exhausted.
        while not self.eof:
            yield Instruction(self, "instr[]")

    def __str__(self):
        # One line per instruction: "byte_offset instruction_dump".
        lines = []
        for instr in self:
            lines.append(str(instr.address/8) + " " + str(instr) + "\n")
        return "".join(lines)
+
def parseActionScript(parent, size):
    # Wrap a size-byte action block into a single ActionScript field.
    yield ActionScript(parent, "action", size=size*8)
+
diff --git a/lib/hachoir_parser/container/asn1.py b/lib/hachoir_parser/container/asn1.py
new file mode 100644
index 0000000000000000000000000000000000000000..dfac847b314453a4a3ae7e4bde178bbc2f07b0db
--- /dev/null
+++ b/lib/hachoir_parser/container/asn1.py
@@ -0,0 +1,282 @@
+"""
+Abstract Syntax Notation One (ASN.1) parser.
+
+Technical informations:
+* PER standard
+  http://www.tu.int/ITU-T/studygroups/com17/languages/X.691-0207.pdf
+* Python library
+  http://pyasn1.sourceforge.net/
+* Specification of Abstract Syntax Notation One (ASN.1)
+  ISO/IEC 8824:1990 Information Technology
+* Specification of Basic Encoding Rules (BER) for ASN.1
+  ISO/IEC 8825:1990 Information Technology
+* OpenSSL asn1parser, use command:
+  openssl asn1parse -i -inform DER -in file.der
+* ITU-U recommendations:
+  http://www.itu.int/rec/T-REC-X/en
+  (X.680, X.681, X.682, X.683, X.690, X.691, X.692, X.693, X.694)
+* dumpasn1
+  http://www.cs.auckland.ac.nz/~pgut001/dumpasn1.c
+
+General information:
+* Wikipedia (english) article
+  http://en.wikipedia.org/wiki/Abstract_Syntax_Notation_One
+* ASN.1 information site
+  http://asn1.elibel.tm.fr/en/
+* ASN.1 consortium
+  http://www.asn1.org/
+
+Encodings:
+* Basic Encoding Rules (BER)
+* Canonical Encoding Rules (CER) -- DER derivative that is not widely used
+* Distinguished Encoding Rules (DER) -- used for encrypted applications
+* XML Encoding Rules (XER)
+* Packed Encoding Rules (PER) -- result in the fewest number of bytes
+* Generic String Encoding Rules (GSER)
+=> Are encodings compatibles? Which encodings are supported??
+
+Author: Victor Stinner
+Creation date: 24 september 2006
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    FieldError, ParserError,
+    Bit, Bits, Bytes, UInt8, GenericInteger, String,
+    Field, Enum, RawBytes)
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_core.tools import createDict, humanDatetime
+from hachoir_core.stream import InputStreamError
+from hachoir_core.text_handler import textHandler
+
+# --- Field parser ---
+
class ASNInteger(Field):
    """
    Integer: two cases:
    - first byte in 0..127: it's the value
    - first byte in 128..255: byte & 127 is the number of bytes,
      next bytes are the value
    """
    def __init__(self, parent, name, description=None):
        Field.__init__(self, parent, name, 8, description)
        stream = self._parent.stream
        addr = self.absolute_address
        value = stream.readBits(addr, 8, BIG_ENDIAN)
        if 128 <= value:
            # Long form: low 7 bits of the first byte give the byte count
            # of the value that follows.
            nbits = (value & 127) * 8
            if not nbits:
                raise ParserError("ASN.1: invalid ASN integer size (zero)")
            if 64 < nbits:
                # Arbitrary limit to catch errors
                raise ParserError("ASN.1: ASN integer is limited to 64 bits")
            self._size = 8 + nbits
            value = stream.readBits(addr+8, nbits, BIG_ENDIAN)
        # The value is computed eagerly at construction; bind it as the
        # createValue callback.
        self.createValue = lambda: value
+
class OID_Integer(Bits):
    """Base-128 variable-length integer used for ASN.1 object identifier
    sub-identifiers: 7 value bits per byte, continuation bit (0x80) set
    on every byte except the last."""
    def __init__(self, parent, name, description=None):
        Bits.__init__(self, parent, name, 8, description)
        stream = self._parent.stream
        addr = self.absolute_address
        size = 8
        value = 0
        byte = stream.readBits(addr, 8, BIG_ENDIAN)
        value = byte & 127
        while 128 <= byte:
            # Continuation bit set: accumulate 7 more value bits.
            addr += 8
            size += 8
            if 64 < size:
                # Arbitrary limit to catch errors
                raise ParserError("ASN.1: Object identifier is limited 64 bits")
            byte = stream.readBits(addr, 8, BIG_ENDIAN)
            value = (value << 7) + (byte & 127)
        self._size = size
        # Eagerly computed value bound as the createValue callback.
        self.createValue = lambda: value
+
def readSequence(self, content_size):
    # Constructed type: parse child objects until the payload is consumed.
    while self.current_size < self.size:
        yield Object(self, "item[]")

def readSet(self, content_size):
    yield Object(self, "value", size=content_size*8)

def readASCIIString(self, content_size):
    yield String(self, "value", content_size, charset="ASCII")

def readUTF8String(self, content_size):
    yield String(self, "value", content_size, charset="UTF-8")

def readBMPString(self, content_size):
    # BMP string: two-byte characters, decoded as UTF-16.
    yield String(self, "value", content_size, charset="UTF-16")

def readBitString(self, content_size):
    # First byte counts the unused (padding) bits of the final byte.
    yield UInt8(self, "padding_size", description="Number of unused bits")
    if content_size > 1:
        yield Bytes(self, "value", content_size-1)

def readOctetString(self, content_size):
    yield Bytes(self, "value", content_size)

def formatObjectID(fieldset):
    # Dotted OID string: first byte encodes two arcs, then one arc per
    # item[] sub-identifier.
    text = [ fieldset["first"].display ]
    items = [ field for field in fieldset if field.name.startswith("item[") ]
    text.extend( str(field.value) for field in items )
    return ".".join(text)

def readObjectID(self, content_size):
    yield textHandler(UInt8(self, "first"), formatFirstObjectID)
    while self.current_size < self.size:
        yield OID_Integer(self, "item[]")

def readBoolean(self, content_size):
    # DER booleans are exactly one byte.
    if content_size != 1:
        raise ParserError("Overlong boolean: got %s bytes, expected 1 byte"%content_size)
    yield textHandler(UInt8(self, "value"), lambda field:str(bool(field.value)))

def readInteger(self, content_size):
    # Always signed?
    yield GenericInteger(self, "value", True, content_size*8)
+
+# --- Format ---
+
def formatFirstObjectID(field):
    """Render the first OID byte as its two leading arcs ("X.Y"):
    the byte encodes arc1 * 40 + arc2."""
    arc1, arc2 = divmod(field.value, 40)
    return "%u.%u" % (arc1, arc2)
+
def formatValue(fieldset):
    # Use the child "value" field's display text as the description.
    return fieldset["value"].display

def formatUTCTime(fieldset):
    """Format an ASN.1 UTCTime string (YYMMDDhhmm[ss]Z or with an offset).

    Two-digit years are pivoted at 50: 00-49 -> 2000-2049,
    50-99 -> 1950-1999.
    """
    import datetime
    value = fieldset["value"].value
    year = int(value[0:2])
    if year < 50:
        year += 2000
    else:
        year += 1900
    month = int(value[2:4])
    day = int(value[4:6])
    hour = int(value[6:8])
    minute = int(value[8:10])
    if value[-1] == "Z":
        # Trailing "Z" marks UTC; seconds are read in this case.
        second = int(value[10:12])
        dt = datetime.datetime(year, month, day, hour, minute, second)
    else:
        # Skip timezone...
        # NOTE(review): a numeric offset (e.g. "+0100") is ignored and
        # seconds are dropped on this path -- confirm whether offset
        # inputs occur in practice.
        dt = datetime.datetime(year, month, day, hour, minute)
    return humanDatetime(dt)
+
+# --- Object parser ---
+
class Object(FieldSet):
    """One BER/DER TLV object.

    Header bits select the class, primitive/constructed form and tag;
    a length field follows, then a payload parsed by the handler that
    matches the tag (for universal class) or handled generically
    (for context/private classes).
    """
    # tag -> (field name, payload reader, description, description formatter)
    TYPE_INFO = {
        0: ("end[]", None, "End (reserved for BER, None)", None), # TODO: Write parser
        1: ("boolean[]", readBoolean, "Boolean", None),
        2: ("integer[]", readInteger, "Integer", None),
        3: ("bit_str[]", readBitString, "Bit string", None),
        4: ("octet_str[]", readOctetString, "Octet string", None),
        5: ("null[]", None, "NULL (empty, None)", None),
        6: ("obj_id[]", readObjectID, "Object identifier", formatObjectID),
        7: ("obj_desc[]", None, "Object descriptor", None), # TODO: Write parser
        8: ("external[]", None, "External, instance of", None), # TODO: Write parser # External?
        9: ("real[]", readASCIIString, "Real number", None), # TODO: Write parser
        10: ("enum[]", readInteger, "Enumerated", None),
        11: ("embedded[]", None, "Embedded PDV", None), # TODO: Write parser
        12: ("utf8_str[]", readUTF8String, "Printable string", None),
        13: ("rel_obj_id[]", None, "Relative object identifier", None), # TODO: Write parser
        14: ("time[]", None, "Time", None), # TODO: Write parser
      # 15: invalid??? sequence of???
        16: ("seq[]", readSequence, "Sequence", None),
        17: ("set[]", readSet, "Set", None),
        18: ("num_str[]", readASCIIString, "Numeric string", None),
        19: ("print_str[]", readASCIIString, "Printable string", formatValue),
        20: ("teletex_str[]", readASCIIString, "Teletex (T61, None) string", None),
        21: ("videotex_str[]", readASCIIString, "Videotex string", None),
        22: ("ia5_str[]", readASCIIString, "IA5 string", formatValue),
        23: ("utc_time[]", readASCIIString, "UTC time", formatUTCTime),
        24: ("general_time[]", readASCIIString, "Generalized time", None),
        25: ("graphic_str[]", readASCIIString, "Graphic string", None),
        26: ("visible_str[]", readASCIIString, "Visible (ISO64, None) string", None),
        27: ("general_str[]", readASCIIString, "General string", None),
        28: ("universal_str[]", readASCIIString, "Universal string", None),
        29: ("unrestricted_str[]", readASCIIString, "Unrestricted string", None),
        30: ("bmp_str[]", readBMPString, "BMP string", None),
      # 31: multiple octet tag number, TODO: not supported

      # Extended tag values:
      #   31: Date
      #   32: Time of day
      #   33: Date-time
      #   34: Duration
    }
    TYPE_DESC = createDict(TYPE_INFO, 2)

    CLASS_DESC = {0: "universal", 1: "application", 2: "context", 3: "private"}
    FORM_DESC = {False: "primitive", True: "constructed"}

    def __init__(self, *args, **kw):
        FieldSet.__init__(self, *args, **kw)
        key = self["type"].value & 31
        if self['class'].value == 0:
            # universal object
            if key in self.TYPE_INFO:
                self._name, self._handler, self._description, create_desc = self.TYPE_INFO[key]
                if create_desc:
                    # Formatter overrides createDescription lazily so the
                    # payload is only decoded when displayed.
                    self.createDescription = lambda: "%s: %s" % (self.TYPE_INFO[key][2], create_desc(self))
                    self._description = None
            elif key == 31:
                raise ParserError("ASN.1 Object: tag bigger than 30 are not supported")
            else:
                self._handler = None
        elif self['form'].value:
            # constructed: treat as sequence
            self._name = 'seq[]'
            self._handler = readSequence
            self._description = 'constructed object type %i' % key
        else:
            # primitive, context/private
            self._name = 'raw[]'
            self._handler = readASCIIString
            self._description = '%s object type %i' % (self['class'].display, key)
        # Total size = header bits up to the end of the length field,
        # plus the payload length in bytes.
        field = self["size"]
        self._size = field.address + field.size + field.value*8

    def createFields(self):
        yield Enum(Bits(self, "class", 2), self.CLASS_DESC)
        yield Enum(Bit(self, "form"), self.FORM_DESC)
        if self['class'].value == 0:
            yield Enum(Bits(self, "type", 5), self.TYPE_DESC)
        else:
            yield Bits(self, "type", 5)
        yield ASNInteger(self, "size", "Size in bytes")
        size = self["size"].value
        if size:
            if self._handler:
                for field in self._handler(self, size):
                    yield field
            else:
                yield RawBytes(self, "raw", size)
+
class ASN1File(Parser):
    """Parser entry point: a DER file is one root ASN.1 object spanning
    the whole stream."""
    PARSER_TAGS = {
        "id": "asn1",
        "category": "container",
        "file_ext": ("der",),
        "min_size": 16,
        "description": "Abstract Syntax Notation One (ASN.1)"
    }
    endian = BIG_ENDIAN

    def validate(self):
        # Accept only if a root object parses and exactly covers the input.
        try:
            root = self[0]
        except (InputStreamError, FieldError):
            return "Unable to create root object"
        if root.size != self.size:
            return "Invalid root object size"
        return True

    def createFields(self):
        yield Object(self, "root")
+
diff --git a/lib/hachoir_parser/container/mkv.py b/lib/hachoir_parser/container/mkv.py
new file mode 100644
index 0000000000000000000000000000000000000000..4e90f460e57680050237e1dc86d15af0618ea8c3
--- /dev/null
+++ b/lib/hachoir_parser/container/mkv.py
@@ -0,0 +1,598 @@
+#
+# Matroska parser
+# Author Julien Muchembled <jm AT jm10.no-ip.com>
+# Created: 8 june 2006
+#
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, Link,
+    MissingField, ParserError,
+    Enum as _Enum, String as _String,
+    Float32, Float64,
+    NullBits, Bits, Bit, RawBytes, Bytes,
+    Int16, GenericInteger)
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_core.iso639 import ISO639_2
+from hachoir_core.tools import humanDatetime
+from hachoir_core.text_handler import textHandler, hexadecimal
+from hachoir_parser.container.ogg import XiphInt
+from datetime import datetime, timedelta
+
class RawInt(GenericInteger):
    """
    Raw integer: have to be used in BIG_ENDIAN!
    """
    # EBML variable-length integer: the number of leading zero bits in
    # the first byte determines how many extra bytes belong to the field.
    def __init__(self, parent, name, description=None):
        GenericInteger.__init__(self, parent, name, False, 8, description)
        i = GenericInteger.createValue(self)
        if i == 0:
            raise ParserError('Invalid integer length!')
        # Grow the field by one byte per leading zero bit.
        while i < 0x80:
            self._size += 8
            i <<= 1
+
class Unsigned(RawInt):
    # EBML unsigned integer: the raw value minus the length-marker bit.
    # Note: this file is Python 2 ("/" below is integer division).
    def __init__(self, parent, name, description=None):
        RawInt.__init__(self, parent, name, description)

    def hasValue(self):
        # createValue may legitimately return None (all-ones = "unknown"),
        # so report a value unconditionally.
        return True
    def createValue(self):
        # header = the length-marker bit; _size / 8 * 7 is the number of
        # payload bits.
        header = 1 << self._size / 8 * 7
        value = RawInt.createValue(self) - header
        if value + 1 == header:
            # All payload bits set: reserved "unknown" marker.
            return None
        return value

class Signed(Unsigned):
    # EBML signed integer: same encoding re-centred on zero; the maximum
    # raw value again means "unknown" (None).
    def createValue(self):
        header = 1 << self._size / 8 * 7 - 1
        value = RawInt.createValue(self) - 3 * header + 1
        if value == header:
            return None
        return value
+
# Factory helpers: each builds the payload field of an EBML element,
# sized in bytes by the element's already-parsed "size" child.
def Enum(parent, enum):
    return _Enum(GenericInteger(parent, 'enum', False, parent['size'].value*8), enum)

def Bool(parent):
    # Any non-zero integer displays as True.
    return textHandler(GenericInteger(parent, 'bool', False, parent['size'].value*8),
        lambda chunk: str(chunk.value != 0))

def UInt(parent):
    return GenericInteger(parent, 'unsigned', False, parent['size'].value*8)

def SInt(parent):
    return GenericInteger(parent, 'signed', True, parent['size'].value*8)

def String(parent):
    return _String(parent, 'string', parent['size'].value, charset="ASCII")

def EnumString(parent, enum):
    return _Enum(String(parent), enum)

def Binary(parent):
    return RawBytes(parent, 'binary', parent['size'].value)
+
class AttachedFile(Bytes):
    """Raw payload of an attached-file element; the file name and MIME
    type live in sibling elements and are used to tag the sub-stream."""
    def __init__(self, parent):
        Bytes.__init__(self, parent, 'file', parent['size'].value, None)
    def _getFilename(self):
        # Cache the sibling FileName lookup; None when the element is absent.
        if not hasattr(self, "_filename"):
            try:
                self._filename = self["../../FileName/unicode"].value
            except MissingField:
                self._filename = None
        return self._filename
    def createDescription(self):
        filename = self._getFilename()
        if filename:
            return 'File "%s"' % filename
        return "('Filename' entry not found)"
    def _createInputStream(self, **args):
        # Propagate MIME type and file name as stream tags so the embedded
        # file can be re-parsed with the right parser.
        tags = args.setdefault("tags",[])
        try:
            tags.append(("mime", self["../../FileMimeType/string"].value))
        except MissingField:
            pass
        filename = self._getFilename()
        if filename:
            tags.append(("filename", filename))
        return Bytes._createInputStream(self, **args)
+
def UTF8(parent):
    return _String(parent,'unicode', parent['size'].value, charset='UTF-8')

def Float(parent):
    # Element floats are 4 or 8 bytes; any other length is kept as raw
    # bytes under an explicit INVALID_FLOAT name.
    size = parent['size'].value
    if size == 4:
        return Float32(parent, 'float')
    elif size == 8:
        return Float64(parent, 'double')
    else:
        return RawBytes(parent, 'INVALID_FLOAT', size)
+
# Matroska dates count nanoseconds from this epoch.
TIMESTAMP_T0 = datetime(2001, 1, 1)

def dateToDatetime(value):
    """Convert a Matroska date (nanoseconds since 2001-01-01) to datetime."""
    microseconds = value // 1000
    return TIMESTAMP_T0 + timedelta(microseconds=microseconds)

def dateToString(field):
    """Human-readable rendering of a date field's raw integer value."""
    return humanDatetime(dateToDatetime(field.value))
+
def Date(parent):
    # Signed nanosecond count rendered as a human-readable datetime.
    return textHandler(GenericInteger(parent, 'date', True, parent['size'].value*8),
        dateToString)

def SeekID(parent):
    # Display the target element's name when its id is known.
    # NOTE(review): "segment" here is the module-level id table defined
    # further down this file; it must exist by the time the display
    # callback runs -- confirm import/definition order.
    return textHandler(GenericInteger(parent, 'binary', False, parent['size'].value*8),
        lambda chunk: segment.get(chunk.value, (hexadecimal(chunk),))[0])

def CueClusterPosition(parent):
    # Link resolving a cue's byte position to the Cluster field located
    # at that address inside the Segment.
    class Cluster(Link):
        def createValue(self):
            parent = self.parent
            segment = parent['.....']
            # Position is relative to the start of the Segment payload.
            pos = parent['unsigned'].value * 8 + segment[2].address
            return segment.getFieldByAddress(pos, feed=False)
    return Cluster(parent, 'cluster')

def CueTrackPositions(parent):
    # Link resolving a cue entry to its Block: scan the linked cluster for
    # a BlockGroup whose block matches the cue's track number and
    # cluster-relative timecode.
    class Block(Link):
        def createValue(self):
            parent = self.parent
            time = parent['../CueTime/unsigned'].value
            track = parent['CueTrack/unsigned'].value
            cluster = parent['CueClusterPosition/cluster'].value
            # Convert the absolute cue time to a cluster-relative timecode.
            time -= cluster['Timecode/unsigned'].value
            for field in cluster:
                if field.name.startswith('BlockGroup['):
                    for path in 'Block/block', 'SimpleBlock':
                        try:
                            block = field[path]
                            if block['track'].value == track and \
                               block['timecode'].value == time:
                                return field
                        except MissingField:
                            pass
            parent.error('Cue point not found')
            return self
    return Block(parent, 'block')
+
class Lace(FieldSet):
    """Laced frames inside a Block, parsed according to one of the three
    Matroska lacing schemes (Xiph, fixed-size or EBML delta-coded)."""
    def __init__(self, parent, lacing, size):
        self.n_frames = parent['n_frames'].value
        # Select the parser for the lacing mode (0=Xiph, 1=fixed, 2=EBML)
        # by overriding the bound createFields method.
        self.createFields = ( self.parseXiph, self.parseFixed, self.parseEBML )[lacing]
        FieldSet.__init__(self, parent, 'Lace', size=size * 8)

    def parseXiph(self):
        # n_frames explicit sizes, the corresponding frames, then a final
        # frame taking whatever bytes remain.
        for i in xrange(self.n_frames):
            yield XiphInt(self, 'size[]')
        for i in xrange(self.n_frames):
            yield RawBytes(self, 'frame[]', self['size['+str(i)+']'].value)
        yield RawBytes(self,'frame[]', (self._size - self.current_size) / 8)

    def parseEBML(self):
        # First size is absolute; the following sizes are signed deltas
        # applied cumulatively; the last frame takes the remaining bytes.
        yield Unsigned(self, 'size')
        for i in xrange(1, self.n_frames):
            yield Signed(self, 'dsize[]')
        size = self['size'].value
        yield RawBytes(self, 'frame[]', size)
        for i in xrange(self.n_frames-1):
            size += self['dsize['+str(i)+']'].value
            yield RawBytes(self, 'frame[]', size)
        yield RawBytes(self,'frame[]', (self._size - self.current_size) / 8)

    def parseFixed(self):
        # n_frames + 1 frames of identical size.
        n = self.n_frames + 1
        size = self._size / 8 / n
        for i in xrange(n):
            yield RawBytes(self, 'frame[]', size)
+
class Block(FieldSet):
    """Matroska Block / SimpleBlock payload: track number, relative
    timecode, a flag byte, then one frame or a lace of frames."""
    def __init__(self, parent):
        FieldSet.__init__(self, parent, 'block')
        self._size = 8 * parent['size'].value

    def lacing(self):
        # 2-bit lacing mode field.
        return _Enum(Bits(self, 'lacing', 2), [ 'none', 'Xiph', 'fixed', 'EBML' ])

    def createFields(self):
        yield Unsigned(self, 'track')
        yield Int16(self, 'timecode')

        # The flag byte layout differs between Block and SimpleBlock parents.
        if self.parent._name == 'Block':
            yield NullBits(self, 'reserved[]', 4)
            yield Bit(self, 'invisible')
            yield self.lacing()
            yield NullBits(self, 'reserved[]', 1)
        elif self.parent._name == 'SimpleBlock[]':
            yield Bit(self, 'keyframe')
            yield NullBits(self, 'reserved', 3)
            yield Bit(self, 'invisible')
            yield self.lacing()
            yield Bit(self, 'discardable')
        else:
            yield NullBits(self, 'reserved', 8)
            return

        size = (self._size - self.current_size) / 8
        lacing = self['lacing'].value
        if lacing:
            # Stored count is frames-minus-one; displayed as the real count.
            yield textHandler(GenericInteger(self, 'n_frames', False, 8),
                lambda chunk: str(chunk.value+1))
            yield Lace(self, lacing - 1, size - 1)
        else:
            yield RawBytes(self,'frame', size)
+
# Element id tables: element id -> (field name, payload factory) where the
# factory may itself be a nested dict for master elements.

# EBML header elements.
ebml = {
    0x1A45DFA3: ('EBML[]', {
        0x4286: ('EBMLVersion',UInt),
        0x42F7: ('EBMLReadVersion',UInt),
        0x42F2: ('EBMLMaxIDLength',UInt),
        0x42F3: ('EBMLMaxSizeLength',UInt),
        0x4282: ('DocType',String),
        0x4287: ('DocTypeVersion',UInt),
        0x4285: ('DocTypeReadVersion',UInt)
        })
}

# SignatureSlot elements.
signature = {
    0x7E8A: ('SignatureAlgo', UInt),
    0x7E9A: ('SignatureHash', UInt),
    0x7EA5: ('SignaturePublicKey', Binary),
    0x7EB5: ('Signature', Binary),
    0x7E5B: ('SignatureElements', {
        0x7E7B: ('SignatureElementList[]', {
            0x6532: ('SignedElement[]', Binary)
            })
        })
}

# ChapterAtom elements (chapter entries and their nested processes).
chapter_atom = {
    0x73C4: ('ChapterUID', UInt),
    0x91:   ('ChapterTimeStart', UInt),
    0x92:   ('ChapterTimeEnd', UInt),
    0x98:   ('ChapterFlagHidden', Bool),
    0x4598: ('ChapterFlagEnabled', Bool),
    0x6E67: ('ChapterSegmentUID', Binary),
    0x6EBC: ('ChapterSegmentEditionUID', Binary),
    0x63C3: ('ChapterPhysicalEquiv', UInt),
    0x8F:   ('ChapterTrack', {
        0x89:   ('ChapterTrackNumber[]', UInt)
        }),
    0x80:   ('ChapterDisplay[]', {
        0x85:   ('ChapString', UTF8),
        0x437C: ('ChapLanguage[]', String),
        0x437E: ('ChapCountry[]', String)
        }),
    0x6944: ('ChapProcess[]', {
        0x6955: ('ChapProcessCodecID', UInt),
        0x450D: ('ChapProcessPrivate', Binary),
        0x6911: ('ChapProcessCommand[]', {
        0x6922: ('ChapProcessTime', UInt),
        0x6933: ('ChapProcessData', Binary)
        })
        })
}

# SimpleTag elements.
simple_tag = {
    0x45A3: ('TagName', UTF8),
    0x447A: ('TagLanguage', String),
    0x44B4: ('TagDefault', Bool), # 0x4484
    0x4487: ('TagString', UTF8),
    0x4485: ('TagBinary', Binary)
}

# SeekHead elements.
segment_seek = {
    0x4DBB:     ('Seek[]', {
        0x53AB:     ('SeekID', SeekID),
        0x53AC:     ('SeekPosition', UInt)
        })
}
+
# Child ids of a segment Info element.
segment_info = {
    0x73A4:     ('SegmentUID', Binary),
    0x7384:     ('SegmentFilename', UTF8),
    0x3CB923:   ('PrevUID', Binary),
    0x3C83AB:   ('PrevFilename', UTF8),
    0x3EB923:   ('NextUID', Binary),
    0x3E83BB:   ('NextFilename', UTF8),
    0x4444:     ('SegmentFamily[]', Binary),
    0x6924:     ('ChapterTranslate[]', {
        0x69FC:     ('ChapterTranslateEditionUID[]', UInt),
        0x69BF:     ('ChapterTranslateCodec', UInt),
        0x69A5:     ('ChapterTranslateID', Binary)
        }),
    0x2AD7B1:   ('TimecodeScale', UInt),
    0x4489:     ('Duration', Float),
    0x4461:     ('DateUTC', Date),
    0x7BA9:     ('Title', UTF8),
    0x4D80:     ('MuxingApp', UTF8),
    0x5741:     ('WritingApp', UTF8)
}

# Child ids of a Cluster element; Block/SimpleBlock use the Block parser.
segment_clusters = {
    0xE7:       ('Timecode', UInt),
    0x5854:     ('SilentTracks', {
        0x58D7:     ('SilentTrackNumber[]', UInt)
        }),
    0xA7:       ('Position', UInt),
    0xAB:       ('PrevSize', UInt),
    0xA0:       ('BlockGroup[]', {
        0xA1:       ('Block', Block),
        0xA2:       ('BlockVirtual[]', Block),
        0x75A1:     ('BlockAdditions', {
            0xA6:       ('BlockMore[]', {
                0xEE:       ('BlockAddID', UInt),
                0xA5:       ('BlockAdditional', Binary)
                })
            }),
        0x9B:       ('BlockDuration', UInt),
        0xFA:       ('ReferencePriority', UInt),
        0xFB:       ('ReferenceBlock[]', SInt),
        0xFD:       ('ReferenceVirtual', SInt),
        0xA4:       ('CodecState', Binary),
        0x8E:       ('Slices[]', {
            0xE8:       ('TimeSlice[]', {
                0xCC:       ('LaceNumber', UInt),
                0xCD:       ('FrameNumber', UInt),
                0xCB:       ('BlockAdditionID', UInt),
                0xCE:       ('Delay', UInt),
                0xCF:       ('Duration', UInt)
                })
            })
        }),
    0xA3:       ('SimpleBlock[]', Block)
}

# Child ids of a track Video element.
tracks_video = {
    0x9A:       ('FlagInterlaced', Bool),
    0x53B8:     ('StereoMode', lambda parent: Enum(parent, \
        [ 'mono', 'right eye', 'left eye', 'both eyes' ])),
    0xB0:       ('PixelWidth', UInt),
    0xBA:       ('PixelHeight', UInt),
    0x54AA:     ('PixelCropBottom', UInt),
    0x54BB:     ('PixelCropTop', UInt),
    0x54CC:     ('PixelCropLeft', UInt),
    0x54DD:     ('PixelCropRight', UInt),
    0x54B0:     ('DisplayWidth', UInt),
    0x54BA:     ('DisplayHeight', UInt),
    0x54B2:     ('DisplayUnit', lambda parent: Enum(parent, \
        [ 'pixels', 'centimeters', 'inches' ])),
    0x54B3:     ('AspectRatioType', lambda parent: Enum(parent, \
        [ 'free resizing', 'keep aspect ratio', 'fixed' ])),
    0x2EB524:   ('ColourSpace', Binary),
    0x2FB523:   ('GammaValue', Float)
}
+
# Child ids of a track Audio element.
tracks_audio = {
    0xB5:       ('SamplingFrequency', Float),
    0x78B5:     ('OutputSamplingFrequency', Float),
    0x9F:       ('Channels', UInt),
    0x7D7B:     ('ChannelPositions', Binary),
    0x6264:     ('BitDepth', UInt)
}

# Child ids of a ContentEncodings element (compression/encryption).
tracks_content_encodings = {
    0x6240:     ('ContentEncoding[]', {
        0x5031:     ('ContentEncodingOrder', UInt),
        0x5032:     ('ContentEncodingScope', UInt),
        0x5033:     ('ContentEncodingType', UInt),
        0x5034:     ('ContentCompression', {
            0x4254:     ('ContentCompAlgo', UInt),
            0x4255:     ('ContentCompSettings', Binary)
            }),
        0x5035:     ('ContentEncryption', {
            0x47e1:     ('ContentEncAlgo', UInt),
            0x47e2:     ('ContentEncKeyID', Binary),
            0x47e3:     ('ContentSignature', Binary),
            0x47e4:     ('ContentSigKeyID', Binary),
            0x47e5:     ('ContentSigAlgo', UInt),
            0x47e6:     ('ContentSigHashAlgo', UInt),
            })
        })
}

# Child ids of a Tracks element.
segment_tracks = {
    0xAE:       ('TrackEntry[]', {
        0xD7:       ('TrackNumber', UInt),
        0x73C5:     ('TrackUID', UInt),
        0x83:       ('TrackType', lambda parent: Enum(parent, {
            0x01: 'video',
            0x02: 'audio',
            0x03: 'complex',
            0x10: 'logo',
            0x11: 'subtitle',
            0x12: 'buttons',
            0x20: 'control'
            })),
        0xB9:       ('FlagEnabled', Bool),
        0x88:       ('FlagDefault', Bool),
        0x55AA:     ('FlagForced[]', Bool),
        0x9C:       ('FlagLacing', Bool),
        0x6DE7:     ('MinCache', UInt),
        0x6DF8:     ('MaxCache', UInt),
        0x23E383:   ('DefaultDuration', UInt),
        0x23314F:   ('TrackTimecodeScale', Float),
        0x537F:     ('TrackOffset', SInt),
        0x55EE:     ('MaxBlockAdditionID', UInt),
        0x536E:     ('Name', UTF8),
        0x22B59C:   ('Language', lambda parent: EnumString(parent, ISO639_2)),
        0x86:       ('CodecID', String),
        0x63A2:     ('CodecPrivate', Binary),
        0x258688:   ('CodecName', UTF8),
        0x7446:     ('AttachmentLink', UInt),
        0x3A9697:   ('CodecSettings', UTF8),
        0x3B4040:   ('CodecInfoURL[]', String),
        0x26B240:   ('CodecDownloadURL[]', String),
        0xAA:       ('CodecDecodeAll', Bool),
        0x6FAB:     ('TrackOverlay[]', UInt),
        0x6624:     ('TrackTranslate[]', {
            0x66FC:     ('TrackTranslateEditionUID[]', UInt),
            0x66BF:     ('TrackTranslateCodec', UInt),
            0x66A5:     ('TrackTranslateTrackID', Binary)
            }),
        0xE0:       ('Video', tracks_video),
        0xE1:       ('Audio', tracks_audio),
        0x6d80:     ('ContentEncodings', tracks_content_encodings)
        })
}
+
# Child ids of a Cues element.  Entries with more than two items yield
# several parsers in sequence: EBML.createFields iterates val[1:].
segment_cues = {
    0xBB:       ('CuePoint[]', {
        0xB3:       ('CueTime', UInt),
        0xB7:       ('CueTrackPositions[]', CueTrackPositions, {
            0xF7:       ('CueTrack', UInt),
            0xF1:       ('CueClusterPosition', CueClusterPosition, UInt),
            0x5378:     ('CueBlockNumber', UInt),
            0xEA:       ('CueCodecState', UInt),
            0xDB:       ('CueReference[]', {
                0x96:       ('CueRefTime', UInt),
                0x97:       ('CueRefCluster', UInt),
                0x535F:     ('CueRefNumber', UInt),
                0xEB:       ('CueRefCodecState', UInt)
                })
            })
        })
}

# Child ids of an Attachments element.
segment_attachments = {
    0x61A7:     ('AttachedFile[]', {
        0x467E:     ('FileDescription', UTF8),
        0x466E:     ('FileName', UTF8),
        0x4660:     ('FileMimeType', String),
        0x465C:     ('FileData', AttachedFile),
        0x46AE:     ('FileUID', UInt),
        0x4675:     ('FileReferral', Binary)
        })
}

# Child ids of a Chapters element.
segment_chapters = {
    0x45B9:     ('EditionEntry[]', {
        0x45BC:     ('EditionUID', UInt),
        0x45BD:     ('EditionFlagHidden', Bool),
        0x45DB:     ('EditionFlagDefault', Bool),
        0x45DD:     ('EditionFlagOrdered', Bool),
        0xB6:       ('ChapterAtom[]', chapter_atom)
        })
}

# Child ids of a Tags element.
segment_tags = {
    0x7373:     ('Tag[]', {
        0x63C0:     ('Targets', {
            0x68CA:     ('TargetTypeValue', UInt),
            0x63CA:     ('TargetType', String),
            0x63C5:     ('TrackUID[]', UInt),
            0x63C9:     ('EditionUID[]', UInt),
            0x63C4:     ('ChapterUID[]', UInt),
            0x63C6:     ('AttachmentUID[]', UInt)
            }),
        0x67C8:     ('SimpleTag[]', simple_tag)
        })
}

# Top-level children of a Segment element.
segment = {
    0x114D9B74: ('SeekHead[]', segment_seek),
    0x1549A966: ('Info[]', segment_info),
    0x1F43B675: ('Cluster[]', segment_clusters),
    0x1654AE6B: ('Tracks[]', segment_tracks),
    0x1C53BB6B: ('Cues', segment_cues),
    0x1941A469: ('Attachments', segment_attachments),
    0x1043A770: ('Chapters', segment_chapters),
    0x1254C367: ('Tags[]', segment_tags)
}
+
class EBML(FieldSet):
    """Generic EBML element: variable-length id and size descriptor, then
    either a leaf payload or nested EBML children, driven by *ids*."""
    def __init__(self, parent, ids):
        FieldSet.__init__(self, parent, "?[]")

        # Set name
        id = self['id'].value
        self.val = ids.get(id)
        if not self.val:
            # Handle ids valid at any level, else fall back to Unknown.
            if id == 0xBF:
                self.val = 'CRC-32[]', Binary
            elif id == 0xEC:
                self.val = 'Void[]', Binary
            elif id == 0x1B538667:
                self.val = 'SignatureSlot[]', signature
            else:
                self.val = 'Unknown[]', Binary
        self._name = self.val[0]

        # Compute size
        size = self['size']
        if size.value is not None:
            # Total = offset of the size descriptor + its length + payload.
            self._size = size.address + size.size + size.value * 8
        elif self._parent._parent:
            raise ParserError("Unknown length (only allowed for the last Level 0 element)")
        elif self._parent._size is not None:
            # Unknown-length top-level element: extend to the stream end.
            self._size = self._parent._size - self.address

    def createFields(self):
        yield RawInt(self, 'id')
        yield Unsigned(self, 'size')
        for val in self.val[1:]:
            if callable(val):
                # Leaf: val constructs the payload field.
                yield val(self)
            else:
                # Master element: val maps child ids to parsers.
                while not self.eof:
                    yield EBML(self, val)
+
class MkvFile(Parser):
    """Matroska/WebM multimedia container parser (EBML based)."""
    EBML_SIGNATURE = 0x1A45DFA3
    PARSER_TAGS = {
        "id": "matroska",
        "category": "container",
        "file_ext": ("mka", "mkv", "webm"),
        "mime": (
            u"video/x-matroska",
            u"audio/x-matroska",
            u"video/webm",
            u"audio/webm"),
        "min_size": 5*8,
        "magic": (("\x1A\x45\xDF\xA3", 0),),
        "description": "Matroska multimedia container"
    }
    endian = BIG_ENDIAN

    def _getDoctype(self):
        # DocType string of the EBML header (first top-level element).
        return self[0]['DocType/string'].value

    def validate(self):
        """Return True for a valid Matroska/WebM stream, False or an
        error message otherwise."""
        if self.stream.readBits(0, 32, self.endian) != self.EBML_SIGNATURE:
            return False
        try:
            first = self[0]
        except ParserError:
            return False
        # Python 2 idiom: None compares below any number, so this reads
        # "stream size is known and smaller than the first chunk".
        if None < self._size < first._size:
            return "First chunk size is invalid"
        if self._getDoctype() not in ('matroska', 'webm'):
            return "Stream isn't a matroska document."
        return True

    def createFields(self):
        hdr = EBML(self, ebml)
        yield hdr

        while not self.eof:
            yield EBML(self, { 0x18538067: ('Segment[]', segment) })

    def createContentSize(self):
        # End of the first Segment: size field end + payload length.
        field = self["Segment[0]/size"]
        return field.absolute_address + field.value * 8 + field.size

    def createDescription(self):
        if self._getDoctype() == 'webm':
            return 'WebM video'
        else:
            return 'Matroska video'

    def createMimeType(self):
        if self._getDoctype() == 'webm':
            return u"video/webm"
        else:
            return u"video/x-matroska"
+
diff --git a/lib/hachoir_parser/container/ogg.py b/lib/hachoir_parser/container/ogg.py
new file mode 100644
index 0000000000000000000000000000000000000000..fa2d26cba99c6ab1e50b668aae02ec8f4eaaf2da
--- /dev/null
+++ b/lib/hachoir_parser/container/ogg.py
@@ -0,0 +1,349 @@
+#
+# Ogg parser
+# Author Julien Muchembled <jm AT jm10.no-ip.com>
+# Created: 10 june 2006
+#
+
+from hachoir_parser import Parser
+from hachoir_core.field import (Field, FieldSet, createOrphanField,
+    NullBits, Bit, Bits, Enum, Fragment, MissingField, ParserError,
+    UInt8, UInt16, UInt24, UInt32, UInt64,
+    RawBytes, String, PascalString32, NullBytes)
+from hachoir_core.stream import FragmentedStream, InputStreamError
+from hachoir_core.endian import LITTLE_ENDIAN, BIG_ENDIAN
+from hachoir_core.tools import humanDurationNanosec
+from hachoir_core.text_handler import textHandler, hexadecimal
+
MAX_FILESIZE = 1000 * 1024 * 1024  # search bound (~1 GB) for the last-page scan
+
class XiphInt(Field):
    """
    Positive integer with variable size. Values bigger than 254 are stored as
    (255, 255, ..., rest): value is the sum of all bytes.

    Example: 1000 is stored as (255, 255, 255, 235), total = 255*3+235 = 1000
    """
    def __init__(self, parent, name, max_size=None, description=None):
        # max_size: optional upper bound (in bits) on the encoded length.
        Field.__init__(self, parent, name, size=0, description=description)
        value = 0
        addr = self.absolute_address
        while max_size is None or self._size < max_size:
            byte = parent.stream.readBits(addr, 8, LITTLE_ENDIAN)
            value += byte
            self._size += 8
            if byte != 0xff:
                # Any byte below 0xff terminates the encoding.
                break
            addr += 8
        # Freeze the decoded value (overrides the Field.createValue hook).
        self.createValue = lambda: value
+
class Lacing(FieldSet):
    """Lacing table: Xiph-encoded segment sizes filling the field set."""
    def createFields(self):
        remaining = self.size
        while remaining:
            entry = XiphInt(self, 'size[]', remaining)
            yield entry
            remaining -= entry.size
+
def parseVorbisComment(parent):
    """Vorbis comment block: vendor string, then 'count' metadata strings,
    then an optional framing flag if bytes remain."""
    yield PascalString32(parent, 'vendor', charset="UTF-8")
    yield UInt32(parent, 'count')
    for index in xrange(parent["count"].value):
        yield PascalString32(parent, 'metadata[]', charset="UTF-8")
    if parent.current_size != parent.size:
        yield UInt8(parent, "framing_flag")
+
# Theora pixel_format values (chroma subsampling); value 1 is reserved.
PIXEL_FORMATS = {
    0: "4:2:0",
    2: "4:2:2",
    3: "4:4:4",
}
+
def formatTimeUnit(field):
    """Display a time unit stored in 100 ns ticks as a human duration."""
    nanoseconds = field.value * 100
    return humanDurationNanosec(nanoseconds)
+
def parseVideoHeader(parent):
    """OGM-style video stream header (fourcc, timing, dimensions)."""
    yield NullBytes(parent, "padding[]", 2)
    yield String(parent, "fourcc", 4)
    yield UInt32(parent, "size")
    yield textHandler(UInt64(parent, "time_unit", "Frame duration"), formatTimeUnit)
    yield UInt64(parent, "sample_per_unit")
    yield UInt32(parent, "default_len")
    yield UInt32(parent, "buffer_size")
    yield UInt16(parent, "bits_per_sample")
    yield NullBytes(parent, "padding[]", 2)
    yield UInt32(parent, "width")
    yield UInt32(parent, "height")
    yield NullBytes(parent, "padding[]", 4)
+
def parseTheoraHeader(parent):
    """Theora identification header (version, geometry, frame rate,
    aspect ratio, pixel format).  Parsed big endian by Chunk.__init__."""
    yield UInt8(parent, "version_major")
    yield UInt8(parent, "version_minor")
    yield UInt8(parent, "version_revision")
    yield UInt16(parent, "width", "Width*16 in pixel")
    yield UInt16(parent, "height", "Height*16 in pixel")

    yield UInt24(parent, "frame_width")
    yield UInt24(parent, "frame_height")
    yield UInt8(parent, "offset_x")
    yield UInt8(parent, "offset_y")

    yield UInt32(parent, "fps_num", "Frame per second numerator")
    yield UInt32(parent, "fps_den", "Frame per second denominator")
    yield UInt24(parent, "aspect_ratio_num", "Aspect ratio numerator")
    yield UInt24(parent, "aspect_ratio_den", "Aspect ratio denominator")

    yield UInt8(parent, "color_space")
    yield UInt24(parent, "target_bitrate")
    yield Bits(parent, "quality", 6)
    yield Bits(parent, "gp_shift", 5)
    yield Enum(Bits(parent, "pixel_format", 2), PIXEL_FORMATS)
    yield Bits(parent, "spare_config", 3)
+
def parseVorbisHeader(parent):
    """Vorbis identification header (channels, sample rate, bitrates)."""
    yield UInt32(parent, "vorbis_version")
    yield UInt8(parent, "audio_channels")
    yield UInt32(parent, "audio_sample_rate")
    yield UInt32(parent, "bitrate_maximum")
    yield UInt32(parent, "bitrate_nominal")
    yield UInt32(parent, "bitrate_minimum")
    yield Bits(parent, "blocksize_0", 4)
    yield Bits(parent, "blocksize_1", 4)
    yield UInt8(parent, "framing_flag")
+
class Chunk(FieldSet):
    """Packet chunk inside a logical stream: decodes recognized codec
    headers (Vorbis, Theora, OGM video), otherwise keeps raw bytes."""
    # codec magic -> {header type byte -> (field name, payload parser)}
    tag_info = {
        "vorbis": {
            3: ("comment", parseVorbisComment),
            1: ("vorbis_hdr", parseVorbisHeader),
        }, "theora": {
            128: ("theora_hdr", parseTheoraHeader),
            129: ("comment", parseVorbisComment),
        }, "video\0": {
            1: ("video_hdr", parseVideoHeader),
        },
    }
    def __init__(self, *args, **kw):
        FieldSet.__init__(self, *args, **kw)
        if 7*8 <= self.size:
            try:
                # Reading type/codec here parses the 7-byte header eagerly.
                self._name, self.parser = self.tag_info[self["codec"].value][self["type"].value]
                if self._name == "theora_hdr":
                    # Theora header fields are stored big endian.
                    self.endian = BIG_ENDIAN
            except KeyError:
                self.parser = None
        else:
            self.parser = None

    def createFields(self):
        if 7*8 <= self.size:
            yield UInt8(self, 'type')
            yield String(self, 'codec', 6)
        if self.parser:
            for field in self.parser(self):
                yield field
        else:
            # Unrecognized chunk: keep whatever is left as raw bytes.
            size = (self.size - self.current_size) // 8
            if size:
                yield RawBytes(self, "raw", size)
+
class Packets:
    """Iterate over the packet sizes (in bits) of a logical stream,
    merging segments whose packet continues across page boundaries."""
    def __init__(self, first):
        # First Segments fragment of the logical stream.
        self.first = first

    def __iter__(self):
        fragment = self.first
        size = None
        while fragment is not None:
            page = fragment.parent
            continued_packet = page["continued_packet"].value
            for segment_size in page.segment_size:
                if continued_packet:
                    # NOTE(review): assumes the stream's first page never has
                    # the continued flag set (size would still be None here).
                    size += segment_size
                    continued_packet = False
                else:
                    if size:
                        yield size * 8
                    size = segment_size
            fragment = fragment.next
        # Flush the final packet, if any.
        if size:
            yield size * 8
+
class Segments(Fragment):
    """Data area of an Ogg page, linked to the other fragments of the
    same logical stream (keyed by the page serial number)."""
    def __init__(self, parent, *args, **kw):
        Fragment.__init__(self, parent, *args, **kw)
        if parent['last_page'].value:
            next = None
        else:
            next = self.createNext
        # Register (or look up) the head fragment for this serial in the
        # parser's stream table and chain this fragment to it.
        self.setLinks(parent.parent.streams.setdefault(parent['serial'].value, self), next)

    def _createInputStream(self, **args):
        if self.first is self:
            # Head fragment: expose the reassembled logical stream, tagged
            # so it is parsed by OggStream.
            return FragmentedStream(self, packets=Packets(self), tags=[("id","ogg_stream")], **args)
        return Fragment._createInputStream(self, **args)

    def _getData(self):
        return self

    def createNext(self):
        # Scan following pages for the next fragment of the same stream.
        parent = self.parent
        index = parent.index
        parent = parent.parent
        first = self.first
        try:
            while True:
                index += 1
                next = parent[index][self.name]
                if next.first is first:
                    return next
        except MissingField:
            # No further page: this was the last fragment.
            pass

    def createFields(self):
        for segment_size in self.parent.segment_size:
            if segment_size:
                yield Chunk(self, "chunk[]", size=segment_size*8)
+
class OggPage(FieldSet):
    """One physical Ogg page: 27-byte header, lacing table, segment data."""
    MAGIC = "OggS"

    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        # Reading the fields below parses the fixed header so the total
        # page size can be computed up front.
        size = 27
        self.lacing_size = self['lacing_size'].value
        if self.lacing_size:
            size += self.lacing_size
            lacing = self['lacing']
            # Segment sizes in bytes, decoded from the lacing table.
            self.segment_size = [ field.value for field in lacing ]
            size += sum(self.segment_size)
        self._size = size * 8

    def createFields(self):
        yield String(self, 'capture_pattern', 4, charset="ASCII")
        if self['capture_pattern'].value != self.MAGIC:
            self.warning('Invalid signature. An Ogg page must start with "%s".' % self.MAGIC)
        yield UInt8(self, 'stream_structure_version')
        yield Bit(self, 'continued_packet')
        yield Bit(self, 'first_page')
        yield Bit(self, 'last_page')
        yield NullBits(self, 'unused', 5)
        yield UInt64(self, 'abs_granule_pos')
        yield textHandler(UInt32(self, 'serial'), hexadecimal)
        yield UInt32(self, 'page')
        yield textHandler(UInt32(self, 'checksum'), hexadecimal)
        yield UInt8(self, 'lacing_size')
        if self.lacing_size:
            yield Lacing(self, "lacing", size=self.lacing_size*8)
            yield Segments(self, "segments", size=self._size-self._current_size)

    def validate(self):
        """Return "" when the page header looks valid, else a message."""
        if self['capture_pattern'].value != self.MAGIC:
            return "Wrong signature"
        if self['stream_structure_version'].value != 0:
            return "Unknown structure version (%s)" % self['stream_structure_version'].value
        return ""
+
class OggFile(Parser):
    """Ogg container parser: a sequence of OggPage fields, with logical
    streams reassembled through the Segments/Packets machinery."""
    PARSER_TAGS = {
        "id": "ogg",
        "category": "container",
        "file_ext": ("ogg", "ogm"),
        "mime": (
            u"application/ogg", u"application/x-ogg",
            u"audio/ogg", u"audio/x-ogg",
            u"video/ogg", u"video/x-ogg",
            u"video/theora", u"video/x-theora",
         ),
        "magic": ((OggPage.MAGIC, 0),),
        "subfile": "skip",
        "min_size": 28*8,
        "description": "Ogg multimedia container"
    }
    endian = LITTLE_ENDIAN

    def validate(self):
        """Check the magic and the first three pages; return True or an
        error message string."""
        magic = OggPage.MAGIC
        if self.stream.readBytes(0, len(magic)) != magic:
            return "Invalid magic string"
        # Validate first 3 pages
        for index in xrange(3):
            try:
                page = self[index]
            except MissingField:
                if self.done:
                    # Complete file with fewer than 3 pages: accept it.
                    return True
                return "Unable to get page #%u" % index
            except (InputStreamError, ParserError):
                return "Unable to create page #%u" % index
            err = page.validate()
            if err:
                return "Invalid page #%s: %s" % (index, err)
        return True

    def createMimeType(self):
        # Decide from the codec header found in the first page.
        if "theora_hdr" in self["page[0]/segments"]:
            return u"video/theora"
        elif "vorbis_hdr" in self["page[0]/segments"]:
            return u"audio/vorbis"
        else:
            return u"application/ogg"

    def createDescription(self):
        if "theora_hdr" in self["page[0]"]:
            return u"Ogg/Theora video"
        elif "vorbis_hdr" in self["page[0]"]:
            return u"Ogg/Vorbis audio"
        else:
            return u"Ogg multimedia container"

    def createFields(self):
        # serial -> head Segments fragment, filled by Segments.__init__.
        self.streams = {}
        while not self.eof:
            yield OggPage(self, "page[]")

    def createLastPage(self):
        """Find the page flagged as last by searching its header bytes.

        FIXME: This doesn't work on all files (eg. some Ogg/Theora); a
        slower fallback would be to walk every "OggS\\0" header from the
        start and keep the final one.
        """
        start = self[0].size
        end = MAX_FILESIZE * 8
        offset = self.stream.searchBytes("OggS\0\5", start, end)
        if offset is None:
            offset = self.stream.searchBytes("OggS\0\4", start, end)
        if offset is None:
            return None
        return createOrphanField(self, offset, OggPage, "page")

    def createContentSize(self):
        # Content ends where the last page ends, if one can be located.
        page = self.createLastPage()
        if page:
            return page.absolute_address + page.size
        else:
            return None
+
+
class OggStream(Parser):
    """One reassembled Ogg logical stream; the backing FragmentedStream
    supplies the packet sizes."""
    PARSER_TAGS = {
        "id": "ogg_stream",
        "category": "container",
        "subfile": "skip",
        "min_size": 7*8,
        "description": "Ogg logical stream"
    }
    endian = LITTLE_ENDIAN

    def validate(self):
        # Never auto-detected: instances are only created internally via
        # the "ogg_stream" tag set by Segments._createInputStream.
        return False

    def createFields(self):
        # Packet sizes are given in bits by the fragmented stream.
        for packet_bits in self.stream.packets:
            yield RawBytes(self, "packet[]", packet_bits // 8)
diff --git a/lib/hachoir_parser/container/realmedia.py b/lib/hachoir_parser/container/realmedia.py
new file mode 100644
index 0000000000000000000000000000000000000000..45c8173bbb6ab32e94abd8b42ac3ec55ee9ce0e5
--- /dev/null
+++ b/lib/hachoir_parser/container/realmedia.py
@@ -0,0 +1,172 @@
+"""
+RealMedia (.rm) parser
+
+Author: Mike Melanson
+Creation date: 15 december 2006
+
+References:
+- http://wiki.multimedia.cx/index.php?title=RealMedia
+- Appendix E: RealMedia File Format (RMFF) Reference
+  https://common.helixcommunity.org/nonav/2003/HCS_SDK_r5/htmfiles/rmff.htm
+
+Samples:
+- http://samples.mplayerhq.hu/real/
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    UInt16, UInt32, Bit, RawBits,
+    RawBytes, String, PascalString8, PascalString16)
+from hachoir_core.text_handler import textHandler, hexadecimal
+from hachoir_core.endian import BIG_ENDIAN
+
def parseHeader(self):
    """.RMF chunk payload: file version and header count."""
    yield UInt32(self, "filever", "File version")
    yield UInt32(self, "numheaders", "number of headers")
+
def parseFileProperties(self):
    """PROP chunk payload: global bitrate/packet statistics and flags."""
    yield UInt32(self, "max_bit_rate", "Maximum bit rate")
    yield UInt32(self, "avg_bit_rate", "Average bit rate")
    yield UInt32(self, "max_pkt_size", "Size of largest data packet")
    yield UInt32(self, "avg_pkt_size", "Size of average data packet")
    yield UInt32(self, "num_pkts", "Number of data packets")
    yield UInt32(self, "duration", "File duration in milliseconds")
    yield UInt32(self, "preroll", "Suggested preroll in milliseconds")
    yield textHandler(UInt32(self, "index_offset", "Absolute offset of first index chunk"), hexadecimal)
    yield textHandler(UInt32(self, "data_offset", "Absolute offset of first data chunk"), hexadecimal)
    yield UInt16(self, "stream_count", "Number of streams in the file")
    yield RawBits(self, "reserved", 13)
    yield Bit(self, "is_live", "Whether file is a live broadcast")
    yield Bit(self, "is_perfect_play", "Whether PerfectPlay can be used")
    yield Bit(self, "is_saveable", "Whether file can be saved")
+
def parseContentDescription(self):
    """CONT chunk payload: title/author/copyright/comment strings."""
    yield PascalString16(self, "title", charset="ISO-8859-1", strip=" \0")
    yield PascalString16(self, "author", charset="ISO-8859-1", strip=" \0")
    yield PascalString16(self, "copyright", charset="ISO-8859-1", strip=" \0")
    yield PascalString16(self, "comment", charset="ISO-8859-1", strip=" \0")
+
+
class NameValueProperty(FieldSet):
    """Self-sized name/value property record (used by LogicalFileInfo)."""
    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        # Reading self["size"] parses the leading size field eagerly.
        self._size = self["size"].value * 8

    def createFields(self):
        yield UInt32(self, "size")
        yield UInt16(self, "obj_version")
        yield PascalString8(self, "name", charset="ASCII")
        yield UInt32(self, "type")
        yield PascalString16(self, "value", charset="ISO-8859-1", strip=" \0")
+
class LogicalFileInfo(FieldSet):
    """logical-fileinfo payload: physical stream map, rules and
    name/value properties."""
    def createFields(self):
        yield UInt32(self, "size")
        yield UInt16(self, "obj_version")
        yield UInt16(self, "nb_physical_stream")
        for index in xrange(self["nb_physical_stream"].value):
            yield UInt16(self, "physical_stream[]")
        for index in xrange(self["nb_physical_stream"].value):
            yield UInt16(self, "data_offset[]")
        yield UInt16(self, "nb_rule")
        for index in xrange(self["nb_rule"].value):
            yield UInt16(self, "rule[]")
        yield UInt16(self, "nb_prop")
        for index in xrange(self["nb_prop"].value):
            yield NameValueProperty(self, "prop[]")
+
def parseMediaPropertiesHeader(self):
    """MDPR chunk payload: per-stream properties plus type-specific data."""
    yield UInt16(self, "stream_number", "Stream number")
    yield UInt32(self, "max_bit_rate", "Maximum bit rate")
    yield UInt32(self, "avg_bit_rate", "Average bit rate")
    yield UInt32(self, "max_pkt_size", "Size of largest data packet")
    yield UInt32(self, "avg_pkt_size", "Size of average data packet")
    yield UInt32(self, "stream_start", "Stream start offset in milliseconds")
    yield UInt32(self, "preroll", "Preroll in milliseconds")
    yield UInt32(self, "duration", "Stream duration in milliseconds")
    yield PascalString8(self, "desc", "Stream description", charset="ISO-8859-1")
    yield PascalString8(self, "mime_type", "MIME type string", charset="ASCII")
    yield UInt32(self, "specific_size", "Size of type-specific data")
    size = self['specific_size'].value
    if size:
        # logical-fileinfo streams get a structured parser, others raw.
        if self["mime_type"].value == "logical-fileinfo":
            yield LogicalFileInfo(self, "file_info", size=size*8)
        else:
            yield RawBytes(self, "specific", size, "Type-specific data")
+
class Chunk(FieldSet):
    """RealMedia chunk: FourCC tag, 32-bit size, 16-bit version, then a
    tag-specific payload (or raw bytes for unknown tags).

    Note: a dead ``createValueFunc`` method that referenced the undefined
    attribute ``self.value_func`` (it could only raise AttributeError)
    has been removed.
    """
    # FourCC -> (field name, payload parser); None = keep payload raw.
    tag_info = {
        ".RMF": ("header", parseHeader),
        "PROP": ("file_prop", parseFileProperties),
        "CONT": ("content_desc", parseContentDescription),
        "MDPR": ("stream_prop[]", parseMediaPropertiesHeader),
        "DATA": ("data[]", None),
        "INDX": ("file_index[]", None)
    }

    def __init__(self, parent, name, description=None):
        FieldSet.__init__(self, parent, name, description)
        # Reading self["size"]/self["tag"] parses the fixed chunk header
        # eagerly so the field-set size and name can be fixed here.
        self._size = (self["size"].value) * 8
        tag = self["tag"].value
        if tag in self.tag_info:
            self._name, self.parse_func = self.tag_info[tag]
        else:
            self._description = ""
            self.parse_func = None

    def createFields(self):
        yield String(self, "tag", 4, "Chunk FourCC", charset="ASCII")
        yield UInt32(self, "size", "Chunk Size")
        yield UInt16(self, "version", "Chunk Version")

        if self.parse_func:
            for field in self.parse_func(self):
                yield field
        else:
            # Unknown/opaque chunk: keep the remaining payload raw.
            size = (self.size - self.current_size) // 8
            if size:
                yield RawBytes(self, "raw", size)

    def createDescription(self):
        return "Chunk: %s" % self["tag"].display
+
class RealMediaFile(Parser):
    """RealMedia (.rm) container parser: a flat sequence of chunks."""
    MAGIC = '.RMF\0\0\0\x12\0\1'    # (magic, size=18, version=1)
    PARSER_TAGS = {
        "id": "real_media",
        "category": "container",
        "file_ext": ("rm",),
        "mime": (
            u"video/x-pn-realvideo",
            u"audio/x-pn-realaudio",
            u"audio/x-pn-realaudio-plugin",
            u"audio/x-real-audio",
            u"application/vnd.rn-realmedia"),
        "min_size": len(MAGIC)*8, # just the identifier
        "magic": ((MAGIC, 0),),
        "description": u"RealMedia (rm) Container File",
    }
    endian = BIG_ENDIAN

    def validate(self):
        """Accept only a well-formed .RMF header chunk."""
        if self.stream.readBytes(0, 4) != '.RMF':
            return "Invalid magic"
        if self["header/size"].value != 18:
            return "Invalid header size"
        if self["header/version"].value not in (0, 1):
            return "Unknown file format version (%s)" % self["header/version"].value
        return True

    def createFields(self):
        while not self.eof:
            yield Chunk(self, "chunk")

    def createMimeType(self):
        # Video MIME type wins if any stream declares RealVideo.
        for prop in self.array("stream_prop"):
            if prop["mime_type"].value == "video/x-pn-realvideo":
                return u"video/x-pn-realvideo"
        return u"audio/x-pn-realaudio"
+
diff --git a/lib/hachoir_parser/container/riff.py b/lib/hachoir_parser/container/riff.py
new file mode 100644
index 0000000000000000000000000000000000000000..a5e4fc0ac10c6275abc5bd2c8bae7a7f3ac1ac75
--- /dev/null
+++ b/lib/hachoir_parser/container/riff.py
@@ -0,0 +1,439 @@
+# -*- coding: UTF-8 -*-
+
+"""
+RIFF parser, able to parse:
+   * AVI video container
+   * WAV audio container
+   * CDA file
+
+Documents:
+- libavformat source code from ffmpeg library
+  http://ffmpeg.mplayerhq.hu/
+- Video for Windows Programmer's Guide
+  http://www.opennet.ru/docs/formats/avi.txt
+- What is an animated cursor?
+  http://www.gdgsoft.com/anituner/help/aniformat.htm
+
+Authors:
+   * Aurélien Jacobs
+   * Mickaël KENIKSSI
+   * Victor Stinner
+Changelog:
+   * 2007-03-30: support ACON (animated icons)
+   * 2006-08-08: merge AVI, WAV and CDA parsers into RIFF parser
+   * 2006-08-03: creation of CDA parser by Mickaël KENIKSSI
+   * 2005-06-21: creation of WAV parser by Victor Stinner
+   * 2005-06-08: creation of AVI parser by Victor Stinner and Aurélien Jacobs
+Thanks to:
+   * Wojtek Kaniewski (wojtekka AT logonet.com.pl) for his CDA file
+     format information
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, ParserError,
+    UInt8, UInt16, UInt32, Enum,
+    Bit, NullBits, NullBytes,
+    RawBytes, String, PaddingBytes,
+    SubFile)
+from hachoir_core.tools import alignValue, humanDuration
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.text_handler import filesizeHandler, textHandler
+from hachoir_parser.video.fourcc import audio_codec_name, video_fourcc_name
+from hachoir_parser.image.ico import IcoFile
+from datetime import timedelta
+
def parseText(self):
    """Yield the whole chunk payload as a NUL-truncated Latin-1 text field."""
    length = self["size"].value
    yield String(self, "text", length,
        strip=" \0", truncate="\0",
        charset="ISO-8859-1")
+
def parseRawFormat(self, size):
    # Fallback when the stream format is unrecognized: keep the payload raw.
    yield RawBytes(self, "raw_format", size)
+
def parseVideoFormat(self, size):
    # Video 'strf' content -- layout resembles a BITMAPINFOHEADER;
    # presumably, confirm against the Video for Windows documentation.
    yield UInt32(self, "video_size", "Video format: Size")
    yield UInt32(self, "width", "Video format: Width")
    yield UInt32(self, "height", "Video format: Height")
    yield UInt16(self, "panes", "Video format: Panes")
    yield UInt16(self, "depth", "Video format: Depth")
    yield UInt32(self, "tag1", "Video format: Tag1")
    yield UInt32(self, "img_size", "Video format: Image size")
    yield UInt32(self, "xpels_meter", "Video format: XPelsPerMeter")
    yield UInt32(self, "ypels_meter", "Video format: YPelsPerMeter")
    yield UInt32(self, "clr_used", "Video format: ClrUsed")
    yield UInt32(self, "clr_important", "Video format: ClrImportant")
+
def parseAudioFormat(self, size):
    # Audio 'strf' content -- layout resembles WAVEFORMAT/WAVEFORMATEX;
    # trailing fields only exist when the chunk is large enough.
    yield Enum(UInt16(self, "codec", "Audio format: Codec id"), audio_codec_name)
    yield UInt16(self, "channel", "Audio format: Channels")
    yield UInt32(self, "sample_rate", "Audio format: Sample rate")
    yield UInt32(self, "bit_rate", "Audio format: Bit rate")
    yield UInt16(self, "block_align", "Audio format: Block align")
    if size >= 16:
        yield UInt16(self, "bits_per_sample", "Audio format: Bits per sample")
    if size >= 18:
        yield UInt16(self, "ext_size", "Audio format: Size of extra information")
    if size >= 28: # and self["a_channel"].value > 2
        yield UInt16(self, "reserved", "Audio format: ")
        yield UInt32(self, "channel_mask", "Audio format: channels placement bitmask")
        yield UInt32(self, "subformat", "Audio format: Subformat id")
+
def parseAVIStreamFormat(self):
    """Dispatch 'strf' parsing on the sibling stream header's type.

    Falls back to a raw dump when the stream type is unknown or the
    chunk is smaller than the handler's fixed minimum size.
    """
    size = self["size"].value
    stream_type = self["../stream_hdr/stream_type"].value
    dispatch = {
        "vids": (parseVideoFormat, 40),
        "auds": (parseAudioFormat, 16),
    }
    handler, min_size = dispatch.get(stream_type, (parseRawFormat, None))
    if min_size is not None and size < min_size:
        handler = parseRawFormat
    for field in handler(self, size):
        yield field
+
def parseAVIStreamHeader(self):
    """Parse an AVI 'strh' chunk (fixed 56-byte stream header)."""
    if self["size"].value != 56:
        raise ParserError("Invalid stream header size")
    yield String(self, "stream_type", 4, "Stream type four character code", charset="ASCII")
    field = String(self, "fourcc", 4, "Stream four character code", strip=" \0", charset="ASCII")
    # Video streams get a human-readable codec name (case-insensitive match).
    if self["stream_type"].value == "vids":
        yield Enum(field, video_fourcc_name, lambda text: text.upper())
    else:
        yield field
    yield UInt32(self, "flags", "Stream flags")
    yield UInt16(self, "priority", "Stream priority")
    yield String(self, "language", 2, "Stream language", charset="ASCII", strip="\0")
    yield UInt32(self, "init_frames", "InitialFrames")
    yield UInt32(self, "scale", "Time scale")
    yield UInt32(self, "rate", "Divide by scale to give frame rate")
    yield UInt32(self, "start", "Stream start time (unit: rate/scale)")
    yield UInt32(self, "length", "Stream length (unit: rate/scale)")
    yield UInt32(self, "buf_size", "Suggested buffer size")
    yield UInt32(self, "quality", "Stream quality")
    yield UInt32(self, "sample_size", "Size of samples")
    yield UInt16(self, "left", "Destination rectangle (left)")
    yield UInt16(self, "top", "Destination rectangle (top)")
    yield UInt16(self, "right", "Destination rectangle (right)")
    yield UInt16(self, "bottom", "Destination rectangle (bottom)")
+
class RedBook(FieldSet):
    """
    RedBook offset parser, used in CD audio (.cda) file.

    A frame/second/minute byte triple followed by one unused byte.
    """
    def createFields(self):
        for unit in ("frame", "second", "minute"):
            yield UInt8(self, unit)
        yield PaddingBytes(self, "notused", 1)
+
def formatSerialNumber(field):
    """
    Format a disc serial number.
    Eg. 0x00085C48 => "0008-5C48"
    """
    high, low = divmod(field.value, 1 << 16)
    return "%04X-%04X" % (high, low)
+
def parseCDDA(self):
    """
    Parse the 'fmt ' chunk of a CD audio (.cda) file.

    HSG address format: number of 1/75 second

    HSG offset = (minute*60 + second)*75 + frame + 150 (from RB offset)
    HSG length = (minute*60 + second)*75 + frame (from RB length)
    """
    yield UInt16(self, "cda_version", "CD file version (currently 1)")
    yield UInt16(self, "track_no", "Number of track")
    yield textHandler(UInt32(self, "disc_serial", "Disc serial number"),
        formatSerialNumber)
    yield UInt32(self, "hsg_offset", "Track offset (HSG format)")
    yield UInt32(self, "hsg_length", "Track length (HSG format)")
    yield RedBook(self, "rb_offset", "Track offset (Red-book format)")
    yield RedBook(self, "rb_length", "Track length (Red-book format)")
+
def parseWAVFormat(self):
    """Parse a WAV 'fmt ' chunk (basic WAVEFORMAT-style fields)."""
    size = self["size"].value
    # Only the plain 16/18-byte layouts are handled; larger extensible
    # formats are parsed with the same leading fields.
    if size not in (16, 18):
        self.warning("Format with size of %s bytes is not supported!" % size)
    yield Enum(UInt16(self, "codec", "Audio codec"), audio_codec_name)
    yield UInt16(self, "nb_channel", "Number of audio channel")
    yield UInt32(self, "sample_per_sec", "Sample per second")
    yield UInt32(self, "byte_per_sec", "Average byte per second")
    yield UInt16(self, "block_align", "Block align")
    yield UInt16(self, "bit_per_sample", "Bits per sample")
+
def parseWAVFact(self):
    # 'fact' chunk: total sample count of the audio stream.
    yield UInt32(self, "nb_sample", "Number of samples in audio stream")
+
def parseAviHeader(self):
    """Parse an AVI 'avih' chunk (main AVI header)."""
    yield UInt32(self, "microsec_per_frame", "Microsecond per frame")
    yield UInt32(self, "max_byte_per_sec", "Maximum byte per second")
    yield NullBytes(self, "reserved", 4)

    # Flags (32-bit field split into named bits)
    yield NullBits(self, "reserved[]", 4)
    yield Bit(self, "has_index")
    yield Bit(self, "must_use_index")
    yield NullBits(self, "reserved[]", 2)
    yield Bit(self, "is_interleaved")
    yield NullBits(self, "reserved[]", 2)
    yield Bit(self, "trust_cktype")
    yield NullBits(self, "reserved[]", 4)
    yield Bit(self, "was_capture_file")
    yield Bit(self, "is_copyrighted")
    yield NullBits(self, "reserved[]", 14)

    yield UInt32(self, "total_frame", "Total number of frames in the video")
    yield UInt32(self, "init_frame", "Initial frame (used in interleaved video)")
    yield UInt32(self, "nb_stream", "Number of streams")
    yield UInt32(self, "sug_buf_size", "Suggested buffer size")
    yield UInt32(self, "width", "Width in pixel")
    yield UInt32(self, "height", "Height in pixel")
    yield UInt32(self, "scale")
    yield UInt32(self, "rate")
    yield UInt32(self, "start")
    yield UInt32(self, "length")
+
def parseODML(self):
    """Parse an OpenDML 'dmlh' chunk: real frame count plus padding."""
    yield UInt32(self, "total_frame", "Real number of frame of OpenDML video")
    remaining = self["size"].value - 4
    if remaining > 0:
        yield NullBytes(self, "padding[]", remaining)
+
class AVIIndexEntry(FieldSet):
    """One 'idx1' index entry: chunk tag, flags, offset and length."""
    size = 16*8  # fixed entry size: 16 bytes
    def createFields(self):
        yield String(self, "tag", 4, "Tag", charset="ASCII")
        yield UInt32(self, "flags")
        yield UInt32(self, "start", "Offset from start of movie data")
        yield UInt32(self, "length")
+
def parseIndex(self):
    # 'idx1' chunk content: a flat array of 16-byte index entries.
    while not self.eof:
        yield AVIIndexEntry(self, "index[]")
+
class Chunk(FieldSet):
    """
    Generic RIFF chunk: 4-byte ASCII tag, 32-bit size, then content
    padded to an even byte count.

    The chunk renames itself and picks a content handler from TAG_INFO;
    "LIST" chunks recurse into nested chunks and use subtag_info for
    naming.
    """
    TAG_INFO = {
        # This dictionary is edited by RiffFile.validate()

        "LIST": ("list[]", None, "Sub-field list"),
        "JUNK": ("junk[]", None, "Junk (padding)"),

        # Metadata
        "INAM": ("title", parseText, "Document title"),
        "IART": ("artist", parseText, "Artist"),
        "ICMT": ("comment", parseText, "Comment"),
        "ICOP": ("copyright", parseText, "Copyright"),
        "IENG": ("author", parseText, "Author"),
        "ICRD": ("creation_date", parseText, "Creation date"),
        "ISFT": ("producer", parseText, "Producer"),
        "IDIT": ("datetime", parseText, "Date time"),

        # TODO: Todo: see below
        # "strn": Stream description
        # TWOCC code, movie/field[]/tag.value[2:4]:
        #   "db": "Uncompressed video frame",
        #   "dc": "Compressed video frame",
        #   "wb": "Audio data",
        #   "pc": "Palette change"
    }

    # LIST sub-tag -> (field name, description)
    subtag_info = {
        "INFO": ("info", "File informations"),
        "hdrl": ("headers", "Headers"),
        "strl": ("stream[]", "Stream header list"),
        "movi": ("movie", "Movie stream"),
        "odml": ("odml", "ODML"),
    }

    def __init__(self, *args, **kw):
        FieldSet.__init__(self, *args, **kw)
        # Total size: 8-byte header + content padded to an even byte count.
        self._size = (8 + alignValue(self["size"].value, 2)) * 8
        tag = self["tag"].value
        if tag in self.TAG_INFO:
            self.tag_info = self.TAG_INFO[tag]
            if tag == "LIST":
                subtag = self["subtag"].value
                if subtag in self.subtag_info:
                    info = self.subtag_info[subtag]
                    self.tag_info = (info[0], None, info[1])
            self._name = self.tag_info[0]
            self._description = self.tag_info[2]
        else:
            self.tag_info = ("field[]", None, None)

    def createFields(self):
        yield String(self, "tag", 4, "Tag", charset="ASCII")
        yield filesizeHandler(UInt32(self, "size", "Size"))
        if not self["size"].value:
            return
        if self["tag"].value == "LIST":
            yield String(self, "subtag", 4, "Sub-tag", charset="ASCII")
            # LIST content is always parsed as nested chunks, each padded
            # to an even size.  (Fix: removed a dead, unused
            # "handler = self.tag_info[1]" assignment that was here.)
            while 8 < (self.size - self.current_size)/8:
                field = self.__class__(self, "field[]")
                yield field
                if (field.size/8) % 2 != 0:
                    yield UInt8(self, "padding[]", "Padding")
        else:
            handler = self.tag_info[1]
            if handler:
                for field in handler(self):
                    yield field
            else:
                yield RawBytes(self, "raw_content", self["size"].value)
            # Consume the alignment padding up to the declared chunk size.
            padding = self.seekBit(self._size)
            if padding:
                yield padding

    def createDescription(self):
        tag = self["tag"].display
        return u"Chunk (tag %s)" % tag
+
class ChunkAVI(Chunk):
    """AVI-specific chunk: adds stream header/format, AVI header and index."""
    TAG_INFO = Chunk.TAG_INFO.copy()
    TAG_INFO.update({
        "strh": ("stream_hdr", parseAVIStreamHeader, "Stream header"),
        "strf": ("stream_fmt", parseAVIStreamFormat, "Stream format"),
        "avih": ("avi_hdr", parseAviHeader, "AVI header"),
        "idx1": ("index", parseIndex, "Stream index"),
        "dmlh": ("odml_hdr", parseODML, "ODML header"),
    })
+
class ChunkCDDA(Chunk):
    """CDA-specific chunk: adds the CD audio information handler."""
    TAG_INFO = Chunk.TAG_INFO.copy()
    TAG_INFO.update({
        'fmt ': ("cdda", parseCDDA, "CD audio informations"),
    })
+
class ChunkWAVE(Chunk):
    """WAV-specific chunk: adds format, fact and data handlers."""
    TAG_INFO = Chunk.TAG_INFO.copy()
    TAG_INFO.update({
        'fmt ': ("format", parseWAVFormat, "Audio format"),
        'fact': ("nb_sample", parseWAVFact, "Number of samples"),
        'data': ("audio_data", None, "Audio stream data"),
    })
+
def parseAnimationHeader(self):
    """Parse an ANI 'anih' chunk (animated cursor/icon header)."""
    yield UInt32(self, "hdr_size", "Size of header (36 bytes)")
    if self["hdr_size"].value != 36:
        # Bug fix: report the header size that failed the check, not the
        # enclosing chunk's "size" field.
        self.warning("Animation header with unknown size (%s)" % self["hdr_size"].value)
    yield UInt32(self, "nb_frame", "Number of unique Icons in this cursor")
    yield UInt32(self, "nb_step", "Number of Blits before the animation cycles")
    yield UInt32(self, "cx")
    yield UInt32(self, "cy")
    yield UInt32(self, "bit_count")
    yield UInt32(self, "planes")
    yield UInt32(self, "jiffie_rate", "Default Jiffies (1/60th of a second) if rate chunk not present")
    yield Bit(self, "is_icon")
    yield NullBits(self, "padding", 31)
+
def parseAnimationSequence(self):
    # 'seq ' chunk: ordered list of icon indexes, one per animation step.
    while not self.eof:
        yield UInt32(self, "icon[]")
+
def formatJiffie(field):
    """Render a jiffie count (units of 1/60 second) as a human duration."""
    seconds = field.value / 60.0
    return humanDuration(timedelta(seconds=seconds))
+
def parseAnimationRate(self):
    # 'rate' chunk: per-step display time in jiffies (1/60 s).
    while not self.eof:
        yield textHandler(UInt32(self, "rate[]"), formatJiffie)
+
def parseIcon(self):
    # 'icon' chunk: an embedded .ico/.cur file, exposed as a sub-file.
    yield SubFile(self, "icon_file", self["size"].value, parser_class=IcoFile)
+
class ChunkACON(Chunk):
    """ANI (animated cursor) chunk: header, sequence, rates and icons."""
    TAG_INFO = Chunk.TAG_INFO.copy()
    TAG_INFO.update({
        'anih': ("anim_hdr", parseAnimationHeader, "Animation header"),
        'seq ': ("anim_seq", parseAnimationSequence, "Animation sequence"),
        # Bug fix: description was a copy-paste of 'seq '
        # ("Animation sequence"); this chunk holds the rates.
        'rate': ("anim_rate", parseAnimationRate, "Animation rate"),
        'icon': ("icon[]", parseIcon, "Icon"),
    })
+
class RiffFile(Parser):
    """
    Parser for Microsoft RIFF containers: AVI video, WAV audio, CDA
    audio-CD track stubs and ANI animated cursors.
    """
    PARSER_TAGS = {
        "id": "riff",
        "category": "container",
        "file_ext": ("avi", "cda", "wav", "ani"),
        "min_size": 16*8,
        "mime": (u"video/x-msvideo", u"audio/x-wav", u"audio/x-cda"),
        # FIXME: Use regex "RIFF.{4}(WAVE|CDDA|AVI )"
        "magic": (
            ("AVI LIST", 8*8),
            ("WAVEfmt ", 8*8),
            ("CDDAfmt ", 8*8),
            ("ACONanih", 8*8),
        ),
        "description": "Microsoft RIFF container"
    }
    # type value -> (chunk class, MIME type, description, file suffix)
    VALID_TYPES = {
        "WAVE": (ChunkWAVE, u"audio/x-wav",     u"Microsoft WAVE audio", ".wav"),
        "CDDA": (ChunkCDDA, u"audio/x-cda",     u"Microsoft Windows audio CD file (cda)", ".cda"),
        "AVI ": (ChunkAVI,  u"video/x-msvideo", u"Microsoft AVI video", ".avi"),
        "ACON": (ChunkACON, u"image/x-ani",     u"Microsoft Windows animated cursor", ".ani"),
    }
    endian = LITTLE_ENDIAN

    def validate(self):
        """Check the RIFF signature and that the content type is known."""
        if self.stream.readBytes(0, 4) != "RIFF":
            return "Wrong signature"
        if self["type"].value not in self.VALID_TYPES:
            return "Unknown RIFF content type"
        return True

    def createFields(self):
        yield String(self, "signature", 4, "AVI header (RIFF)", charset="ASCII")
        yield filesizeHandler(UInt32(self, "filesize", "File size"))
        yield String(self, "type", 4, "Content type (\"AVI \", \"WAVE\", ...)", charset="ASCII")

        # Choose chunk type depending on file type
        try:
            chunk_cls = self.VALID_TYPES[self["type"].value][0]
        except KeyError:
            chunk_cls = Chunk

        # Parse all chunks up to filesize
        # NOTE(review): "filesize" counts bytes after the 8-byte header, so
        # the exact bound would be (filesize+8)*8 bits as in
        # createContentSize() -- left unchanged, parse results may depend
        # on this bound; confirm before touching it.
        while self.current_size < self["filesize"].value*8+8:
            yield chunk_cls(self, "chunk[]")
        if not self.eof:
            yield RawBytes(self, "padding[]", (self.size-self.current_size)/8)

    def createMimeType(self):
        try:
            return self.VALID_TYPES[self["type"].value][1]
        except KeyError:
            return None

    def createDescription(self):
        tag = self["type"].value
        if tag == "AVI ":
            desc = u"Microsoft AVI video"
            if "headers/avi_hdr" in self:
                header = self["headers/avi_hdr"]
                desc += ": %ux%u pixels" % (header["width"].value, header["height"].value)
                microsec = header["microsec_per_frame"].value
                if microsec:
                    desc += ", %.1f fps" % (1000000.0 / microsec)
                    if "total_frame" in header and header["total_frame"].value:
                        # Bug fix: microsec_per_frame is in microseconds,
                        # so convert to seconds before building the
                        # timedelta (was off by a factor of 10**6).
                        delta = timedelta(seconds=float(header["total_frame"].value) * microsec / 1000000.0)
                        desc += ", " + humanDuration(delta)
            return desc
        else:
            try:
                return self.VALID_TYPES[tag][2]
            except KeyError:
                return u"Microsoft RIFF container"

    def createContentSize(self):
        # Content = 8-byte RIFF header + "filesize" bytes, clamped to the
        # real stream size.
        size = (self["filesize"].value + 8) * 8
        return min(size, self.stream.size)

    def createFilenameSuffix(self):
        try:
            return self.VALID_TYPES[self["type"].value][3]
        except KeyError:
            return ".riff"
+
diff --git a/lib/hachoir_parser/container/swf.py b/lib/hachoir_parser/container/swf.py
new file mode 100644
index 0000000000000000000000000000000000000000..951c62192c83cca88fedd57759d4c19d85714071
--- /dev/null
+++ b/lib/hachoir_parser/container/swf.py
@@ -0,0 +1,400 @@
+"""
+SWF (Macromedia/Adobe Flash) file parser.
+
+Documentation:
+
+ - Alexis' SWF Reference:
+   http://www.m2osw.com/swf_alexref.html
+ - http://www.half-serious.com/swf/format/
+ - http://www.anotherbigidea.com/javaswf/
+ - http://www.gnu.org/software/gnash/
+
+Author: Victor Stinner
+Creation date: 29 october 2006
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, ParserError,
+    Bit, Bits, UInt8, UInt32, UInt16, CString, Enum,
+    Bytes, RawBytes, NullBits, String, SubFile)
+from hachoir_core.endian import LITTLE_ENDIAN, BIG_ENDIAN
+from hachoir_core.text_handler import textHandler, filesizeHandler
+from hachoir_core.tools import paddingSize, humanFrequency
+from hachoir_parser.image.common import RGB
+from hachoir_parser.image.jpeg import JpegChunk, JpegFile
+from hachoir_core.stream import StringInputStream, ConcatStream
+from hachoir_parser.common.deflate import Deflate, has_deflate
+from hachoir_parser.container.action_script import parseActionScript
+import math
+
# Maximum file size (50 MB)
MAX_FILE_SIZE = 50 * 1024 * 1024

# Twips per pixel: SWF coordinates are stored in twips (1 px = 20 twips).
TWIPS = 20
+
class RECT(FieldSet):
    """SWF RECT record: four nbits-wide coordinates in twips."""
    endian = BIG_ENDIAN
    def createFields(self):
        yield Bits(self, "nbits", 5)
        nbits = self["nbits"].value
        if not nbits:
            raise ParserError("SWF parser: Invalid RECT field size (0)")
        yield Bits(self, "xmin", nbits, "X minimum in twips")
        yield Bits(self, "xmax", nbits, "X maximum in twips")
        yield Bits(self, "ymin", nbits, "Y minimum in twips")
        yield Bits(self, "ymax", nbits, "Y maximum in twips")
        # Byte-align the record
        size = paddingSize(self.current_size, 8)
        if size:
            yield NullBits(self, "padding", size)

    def getWidth(self):
        # NOTE(review): uses xmax alone (assumes xmin == 0) -- confirm.
        return math.ceil(float(self["xmax"].value) / TWIPS)
    def getHeight(self):
        return math.ceil(float(self["ymax"].value) / TWIPS)

    def createDescription(self):
        return "Rectangle: %ux%u" % (self.getWidth(), self.getHeight())
+
class FixedFloat16(FieldSet):
    """8.8 fixed-point number: integer part plus fractional part / 256."""
    def createFields(self):
        yield UInt8(self, "float_part")
        yield UInt8(self, "int_part")

    def createValue(self):
        fraction = self["float_part"].value / 256.0
        return self["int_part"].value + fraction
+
def parseBackgroundColor(parent, size):
    # SetBackgroundColor tag: a single RGB triple.
    yield RGB(parent, "color")
+
def bit2hertz(field):
    """Map the 2-bit SWF sound-rate code to a frequency: 5512.5 Hz * 2**code."""
    return humanFrequency(math.ldexp(5512.5, field.value))
+
SOUND_CODEC_MP3 = 2
# Codec id -> name, as stored in the 4-bit "codec" field of sound tags.
SOUND_CODEC = {
    0: "RAW",
    1: "ADPCM",
    SOUND_CODEC_MP3: "MP3",
    3: "Uncompressed",
    6: "Nellymoser",
}
+
class SoundEnvelope(FieldSet):
    """Sound envelope: a count followed by (mark44, level0, level1) points."""
    def createFields(self):
        yield UInt8(self, "count")
        for index in xrange(self["count"].value):
            yield UInt32(self, "mark44[]")
            yield UInt16(self, "level0[]")
            yield UInt16(self, "level1[]")
+
def parseSoundBlock(parent, size):
    """
    Parse a SoundStreamBlock tag.

    TODO: pick the layout from the codec of the preceding "def_sound[]" /
    sound stream header; the MP3 layout (sample count + seek samples)
    is currently assumed for every codec.
    """
    # Fix: removed a placeholder "if True:" dead conditional around the
    # two fields below; behavior is unchanged.
    yield UInt16(parent, "samples")
    yield UInt16(parent, "left")
    size = (parent.size - parent.current_size) // 8
    if size:
        yield RawBytes(parent, "music_data", size)
+
def parseStartSound(parent, size):
    """Parse a StartSound tag: sound id, flag bits, then optional fields
    gated by the flags (in/out points, loop count, envelope)."""
    yield UInt16(parent, "sound_id")
    yield Bit(parent, "has_in_point")
    yield Bit(parent, "has_out_point")
    yield Bit(parent, "has_loops")
    yield Bit(parent, "has_envelope")
    yield Bit(parent, "no_multiple")
    yield Bit(parent, "stop_playback")
    yield NullBits(parent, "reserved", 2)

    if parent["has_in_point"].value:
        yield UInt32(parent, "in_point")
    if parent["has_out_point"].value:
        yield UInt32(parent, "out_point")
    if parent["has_loops"].value:
        yield UInt16(parent, "loop_count")
    if parent["has_envelope"].value:
        yield SoundEnvelope(parent, "envelope")
+
def parseDefineSound(parent, size):
    """Parse a DefineSound tag: id, format bits, sample count and data."""
    yield UInt16(parent, "sound_id")

    yield Bit(parent, "is_stereo")
    yield Bit(parent, "is_16bit")
    yield textHandler(Bits(parent, "rate", 2), bit2hertz)
    yield Enum(Bits(parent, "codec", 4), SOUND_CODEC)

    yield UInt32(parent, "sample_count")

    # MP3 data is prefixed with a 16-bit length/seek value.
    if parent["codec"].value == SOUND_CODEC_MP3:
        yield UInt16(parent, "len")

    size = (parent.size - parent.current_size) // 8
    if size:
        yield RawBytes(parent, "music_data", size)
+
def parseSoundHeader(parent, size):
    """
    Parse a SoundStreamHead tag: playback settings, stream sound
    settings, average sample count and (MP3 only) the latency seek value.
    """
    yield Bit(parent, "playback_is_stereo")
    yield Bit(parent, "playback_is_16bit")
    yield textHandler(Bits(parent, "playback_rate", 2), bit2hertz)
    yield NullBits(parent, "reserved", 4)

    yield Bit(parent, "sound_is_stereo")
    yield Bit(parent, "sound_is_16bit")
    yield textHandler(Bits(parent, "sound_rate", 2), bit2hertz)
    yield Enum(Bits(parent, "codec", 4), SOUND_CODEC)

    yield UInt16(parent, "sample_count")

    # Consistency fix: use the named constant instead of the magic
    # number 2 (same check as parseDefineSound).
    if parent["codec"].value == SOUND_CODEC_MP3:
        yield UInt16(parent, "latency_seek")
+
class JpegHeader(FieldSet):
    """JPEG chunk sequence preceding the image data of a DefineBits tag."""
    endian = BIG_ENDIAN
    def createFields(self):
        count = 1
        while True:
            chunk = JpegChunk(self, "jpeg_chunk[]")
            yield chunk
            # Stop on SOI/EOI, but never on the very first chunk.
            if 1 < count and chunk["type"].value in (JpegChunk.TAG_SOI, JpegChunk.TAG_EOI):
                break
            count += 1
+
def parseJpeg(parent, size):
    """
    Parse DefineBits / DefineBitsJPEG2 / DefineBitsJPEG3 payloads:
    character id, optional alpha offset (JPEG3), optional leading JPEG
    table chunks, the JPEG image itself and (JPEG3) the alpha plane.
    """
    yield UInt16(parent, "char_id", "Character identifier")
    size -= 2

    code = parent["code"].value
    if code != Tag.TAG_BITS:
        if code == Tag.TAG_BITS_JPEG3:
            # NOTE(review): description looks copy-pasted from char_id;
            # this value is the byte offset of the alpha data.
            yield UInt32(parent, "alpha_offset", "Character identifier")
            size -= 4

        # Some writers prepend JPEG table chunks before the image data.
        addr = parent.absolute_address + parent.current_size + 16
        if parent.stream.readBytes(addr, 2) in ("\xff\xdb", "\xff\xd8"):
            header = JpegHeader(parent, "jpeg_header")
            yield header
            hdr_size = header.size // 8
            size -= hdr_size
        else:
            hdr_size = 0

        if code == Tag.TAG_BITS_JPEG3:
            img_size = parent["alpha_offset"].value - hdr_size
        else:
            img_size = size
    else:
        img_size = size
    yield SubFile(parent, "image", img_size, "JPEG picture", parser=JpegFile)
    if code == Tag.TAG_BITS_JPEG3:
        # Remaining bytes hold the alpha channel.
        size = (parent.size - parent.current_size) // 8
        yield RawBytes(parent, "alpha", size, "Image data")
+
def parseVideoFrame(parent, size):
    """Parse a VideoFrame tag: stream id, frame number, then codec data."""
    yield UInt16(parent, "stream_id")
    yield UInt16(parent, "frame_num")
    remaining = size - 4
    if remaining > 0:
        yield RawBytes(parent, "video_data", remaining)
+
class Export(FieldSet):
    """One export entry: object id plus its exported symbol name."""
    def createFields(self):
        yield UInt16(self, "object_id")
        yield CString(self, "name")
+
def parseExport(parent, size):
    # ExportAssets tag: a count followed by (object_id, name) entries.
    yield UInt16(parent, "count")
    for index in xrange(parent["count"].value):
        yield Export(parent, "export[]")
+
class Tag(FieldSet):
    """
    One SWF tag: short form (6-bit length) or long form (a 6-bit marker
    of 63 followed by a 32-bit length), then the tag payload.
    """
    TAG_BITS = 6
    TAG_BITS_JPEG2 = 32
    TAG_BITS_JPEG3 = 35
    # code -> (field name, description, payload parser or None)
    TAG_INFO = {
        # SWF version 1.0
         0: ("end[]", "End", None),
         1: ("show_frame[]", "Show frame", None),
         2: ("def_shape[]", "Define shape", None),
         3: ("free_char[]", "Free character", None),
         4: ("place_obj[]", "Place object", None),
         5: ("remove_obj[]", "Remove object", None),
         6: ("def_bits[]", "Define bits", parseJpeg),
         7: ("def_but[]", "Define button", None),
         8: ("jpg_table", "JPEG tables", None),
         9: ("bkgd_color[]", "Set background color", parseBackgroundColor),
        10: ("def_font[]", "Define font", None),
        11: ("def_text[]", "Define text", None),
        12: ("action[]", "Action script", parseActionScript),
        13: ("def_font_info[]", "Define font info", None),

        # SWF version 2.0
        14: ("def_sound[]", "Define sound", parseDefineSound),
        15: ("start_sound[]", "Start sound", parseStartSound),
        16: ("stop_sound[]", "Stop sound", None),
        17: ("def_but_sound[]", "Define button sound", None),
        18: ("sound_hdr", "Sound stream header", parseSoundHeader),
        19: ("sound_blk[]", "Sound stream block", parseSoundBlock),
        20: ("def_bits_lossless[]", "Define bits lossless", None),
        21: ("def_bits_jpeg2[]", "Define bits JPEG 2", parseJpeg),
        22: ("def_shape2[]", "Define shape 2", None),
        23: ("def_but_cxform[]", "Define button CXFORM", None),
        24: ("protect", "File is protected", None),

        # SWF version 3.0
        25: ("path_are_ps[]", "Paths are Postscript", None),
        26: ("place_obj2[]", "Place object 2", None),
        28: ("remove_obj2[]", "Remove object 2", None),
        29: ("sync_frame[]", "Synchronize frame", None),
        31: ("free_all[]", "Free all", None),
        32: ("def_shape3[]", "Define shape 3", None),
        33: ("def_text2[]", "Define text 2", None),
        34: ("def_but2[]", "Define button2", None),
        35: ("def_bits_jpeg3[]", "Define bits JPEG 3", parseJpeg),
        36: ("def_bits_lossless2[]", "Define bits lossless 2", None),
        39: ("def_sprite[]", "Define sprite", None),
        40: ("name_character[]", "Name character", None),
        41: ("serial_number", "Serial number", None),
        42: ("generator_text[]", "Generator text", None),
        43: ("frame_label[]", "Frame label", None),
        45: ("sound_hdr2[]", "Sound stream header2", parseSoundHeader),
        46: ("def_morph_shape[]", "Define morph shape", None),
        47: ("gen_frame[]", "Generate frame", None),
        48: ("def_font2[]", "Define font 2", None),
        49: ("tpl_command[]", "Template command", None),

        # SWF version 4.0
        37: ("def_text_field[]", "Define text field", None),
        38: ("def_quicktime_movie[]", "Define QuickTime movie", None),

        # SWF version 5.0
        50: ("def_cmd_obj[]", "Define command object", None),
        51: ("flash_generator", "Flash generator", None),
        52: ("gen_ext_font[]", "Gen external font", None),
        56: ("export[]", "Export", parseExport),
        57: ("import[]", "Import", None),
        # NOTE(review): field name keeps the historical typo
        # "ebnable_debug"; renaming would break lookups by field name.
        58: ("ebnable_debug", "Enable debug", None),

        # SWF version 6.0
        59: ("do_init_action[]", "Do init action", None),
        60: ("video_str[]", "Video stream", None),
        61: ("video_frame[]", "Video frame", parseVideoFrame),
        62: ("def_font_info2[]", "Define font info 2", None),
        63: ("mx4[]", "MX4", None),
        64: ("enable_debug2", "Enable debugger 2", None),

        # SWF version 7.0
        65: ("script_limits[]", "Script limits", None),
        66: ("tab_index[]", "Set tab index", None),

        # SWF version 8.0
        69: ("file_attr[]", "File attributes", None),
        70: ("place_obj3[]", "Place object 3", None),
        71: ("import2[]", "Import a definition list from another movie", None),
        73: ("def_font_align[]", "Define font alignment zones", None),
        74: ("csm_txt_set[]", "CSM text settings", None),
        75: ("def_font3[]", "Define font text 3", None),
        77: ("metadata[]", "XML code describing the movie", None),
        78: ("def_scale_grid[]", "Define scaling factors", None),
        83: ("def_shape4[]", "Define shape 4", None),
        84: ("def_morph2[]", "Define a morphing shape 2", None),
    }

    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        size = self["length"].value
        # Long form has a 6-byte header (2 + 4), short form 2 bytes.
        if self[0].name == "length_ext":
            self._size = (6+size) * 8
        else:
            self._size = (2+size) * 8
        code = self["code"].value
        if code in self.TAG_INFO:
            self._name, self._description, self.parser = self.TAG_INFO[code]
        else:
            self.parser = None

    def createFields(self):
        # A 6-bit length of 63 (0x3F) flags the long form with a
        # 32-bit length field.
        if self.stream.readBits(self.absolute_address, 6, self.endian) == 63:
            yield Bits(self, "length_ext", 6)
            yield Bits(self, "code", 10)
            yield filesizeHandler(UInt32(self, "length"))
        else:
            yield filesizeHandler(Bits(self, "length", 6))
            yield Bits(self, "code", 10)
        size = self["length"].value
        if 0 < size:
            if self.parser:
                for field in self.parser(self, size):
                    yield field
            else:
                yield RawBytes(self, "data", size)

    def createDescription(self):
        return "Tag: %s (%s)" % (self["code"].display, self["length"].display)
+
class SwfFile(Parser):
    """
    Macromedia/Adobe Flash (SWF) parser: uncompressed "FWS" files are
    parsed tag by tag; zlib-compressed "CWS" bodies are exposed through
    a deflate sub-stream re-parsed as an uncompressed SWF.
    """
    VALID_VERSIONS = set(xrange(1, 9+1))
    PARSER_TAGS = {
        "id": "swf",
        "category": "container",
        "file_ext": ["swf"],
        "mime": (u"application/x-shockwave-flash",),
        "min_size": 64,
        "description": u"Macromedia Flash data"
    }
    # One magic per supported version, for both signatures.
    PARSER_TAGS["magic"] = []
    for version in VALID_VERSIONS:
        PARSER_TAGS["magic"].append(("FWS%c" % version, 0))
        PARSER_TAGS["magic"].append(("CWS%c" % version, 0))
    endian = LITTLE_ENDIAN
    SWF_SCALE_FACTOR = 1.0 / 20  # twips -> pixels

    def validate(self):
        # Check signature, version, size bound and (FWS only) RECT padding.
        if self.stream.readBytes(0, 3) not in ("FWS", "CWS"):
            return "Wrong file signature"
        if self["version"].value not in self.VALID_VERSIONS:
            return "Unknown version"
        if MAX_FILE_SIZE < self["filesize"].value:
            return "File too big (%u)" % self["filesize"].value
        if self["signature"].value == "FWS":
            if self["rect/padding"].value != 0:
                return "Unknown rectangle padding value"
        return True

    def createFields(self):
        yield String(self, "signature", 3, "SWF format signature", charset="ASCII")
        yield UInt8(self, "version")
        yield filesizeHandler(UInt32(self, "filesize"))
        if self["signature"].value != "CWS":
            # Uncompressed file: frame metadata then the tag stream.
            yield RECT(self, "rect")
            yield FixedFloat16(self, "frame_rate")
            yield UInt16(self, "frame_count")

            while not self.eof:
                yield Tag(self, "tag[]")
        else:
            # Compressed file: everything after the 8-byte header is zlib
            # data.  Rebuild a fake uncompressed header ("FWS" + version +
            # filesize) and concatenate it with the inflated body so the
            # result can be parsed again as a plain SWF.
            size = (self.size - self.current_size) // 8
            if has_deflate:
                data = Deflate(Bytes(self, "compressed_data", size), False)
                def createInputStream(cis, source=None, **args):
                    stream = cis(source=source)
                    header = StringInputStream("FWS" + self.stream.readBytes(3*8, 5))
                    args.setdefault("tags",[]).append(("class", SwfFile))
                    return ConcatStream((header, stream), source=stream.source, **args)
                data.setSubIStream(createInputStream)
                yield data
            else:
                yield Bytes(self, "compressed_data", size)

    def createDescription(self):
        desc = ["version %u" % self["version"].value]
        if self["signature"].value == "CWS":
            desc.append("compressed")
        return u"Macromedia Flash data: %s" % (", ".join(desc))

    def createContentSize(self):
        if self["signature"].value == "FWS":
            return self["filesize"].value * 8
        else:
            # TODO: Size of compressed Flash?
            return None
+
diff --git a/lib/hachoir_parser/file_system/__init__.py b/lib/hachoir_parser/file_system/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..863aae3acef75a373851d81ccaf3bd128117498b
--- /dev/null
+++ b/lib/hachoir_parser/file_system/__init__.py
@@ -0,0 +1,8 @@
+from hachoir_parser.file_system.ext2 import EXT2_FS
+from hachoir_parser.file_system.fat import FAT12, FAT16, FAT32
+from hachoir_parser.file_system.mbr import MSDos_HardDrive
+from hachoir_parser.file_system.ntfs import NTFS
+from hachoir_parser.file_system.iso9660 import ISO9660
+from hachoir_parser.file_system.reiser_fs import REISER_FS
+from hachoir_parser.file_system.linux_swap import LinuxSwapFile
+
diff --git a/lib/hachoir_parser/file_system/ext2.py b/lib/hachoir_parser/file_system/ext2.py
new file mode 100644
index 0000000000000000000000000000000000000000..634fe0634798458551f15866dcc702c9c2cf54e6
--- /dev/null
+++ b/lib/hachoir_parser/file_system/ext2.py
@@ -0,0 +1,464 @@
+"""
+EXT2 (Linux) file system parser.
+
+Author: Victor Stinner
+
+Sources:
+- EXT2FS source code
+  http://ext2fsd.sourceforge.net/
+- Analysis of the Ext2fs structure
+  http://www.nondot.org/sabre/os/files/FileSystems/ext2fs/
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, ParserError,
+    Bit, Bits, UInt8, UInt16, UInt32,
+    Enum, String, TimestampUnix32, RawBytes, NullBytes)
+from hachoir_core.tools import (alignValue,
+    humanDuration, humanFilesize)
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.text_handler import textHandler
+from itertools import izip
+
+class DirectoryEntry(FieldSet):
+    """One EXT2 directory entry: inode number, record length and file name."""
+    # On-disk file type codes stored in the directory entry
+    file_type = {
+        1: "Regular",
+        2: "Directory",
+        3: "Char. dev.",
+        4: "Block dev.",
+        5: "Fifo",
+        6: "Socket",
+        7: "Symlink",
+        8: "Max"
+    }
+
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+        # Entry size (in bits) comes from the on-disk record length
+        self._size = self["rec_len"].value * 8
+
+    def createFields(self):
+        yield UInt32(self, "inode", "Inode")
+        yield UInt16(self, "rec_len", "Record length")
+        yield UInt8(self, "name_len", "Name length")
+        yield Enum(UInt8(self, "file_type", "File type"), self.file_type)
+        yield String(self, "name", self["name_len"].value, "File name")
+        # Record is padded up to rec_len bytes
+        size = (self._size - self.current_size)//8
+        if size:
+            yield NullBytes(self, "padding", size)
+
+    def createDescription(self):
+        # Strip NUL padding from the stored name
+        name = self["name"].value.strip("\0")
+        if name:
+            return "Directory entry: %s" % name
+        else:
+            return "Directory entry (empty)"
+
+class Inode(FieldSet):
+    """One EXT2 inode: mode bits, ownership, timestamps and block pointers."""
+    # Well-known reserved inode numbers (1-based)
+    inode_type_name = {
+        1: "list of bad blocks",
+        2: "Root directory",
+        3: "ACL inode",
+        4: "ACL inode",
+        5: "Boot loader",
+        6: "Undelete directory",
+        8: "EXT3 journal"
+    }
+    # High 4 bits of the mode word
+    file_type = {
+        1: "Fifo",
+        2: "Character device",
+        4: "Directory",
+        6: "Block device",
+        8: "Regular",
+        10: "Symbolic link",
+        12: "Socket",
+    }
+    # ls-style type letter for each file_type code
+    file_type_letter = {
+        1: "p",
+        4: "d",
+        2: "c",
+        6: "b",
+        10: "l",
+        12: "s",
+    }
+    # 68 bytes of fixed fields + 15 block pointers of 4 bytes (in bits)
+    static_size = (68 + 15*4)*8
+
+    def __init__(self, parent, name, index):
+        FieldSet.__init__(self, parent, name, None)
+        # Inode numbers are 1-based on disk
+        self.uniq_id = 1+index
+
+    def createDescription(self):
+        desc = "Inode %s: " % self.uniq_id
+        size = self["size"].value
+        if self["blocks"].value == 0:
+            desc += "(unused)"
+        elif 11 <= self.uniq_id:
+            # Inodes >= 11 are ordinary files/directories
+            size = humanFilesize(size)
+            desc += "file, size=%s, mode=%s" % (size, self.getMode())
+        else:
+            # Inodes 1-10 are reserved for file-system bookkeeping
+            if self.uniq_id in self.inode_type_name:
+                desc += self.inode_type_name[self.uniq_id]
+                if self.uniq_id == 2:
+                    desc += " (%s)" % self.getMode()
+            else:
+                desc += "special"
+        return desc
+
+    def getMode(self):
+        """Format the permission bits as an ls-style string, e.g. "drwxr-xr-x"."""
+        names = (
+            ("owner_read", "owner_write", "owner_exec"),
+            ("group_read", "group_write", "group_exec"),
+            ("other_read", "other_write", "other_exec"))
+        letters = "rwx"
+        mode = [ "-"  for index in xrange(10) ]
+        index = 1
+        for loop in xrange(3):
+            for name, letter in izip(names[loop], letters):
+                if self[name].value:
+                    mode[index] = letter
+                index += 1
+        # First character encodes the file type (d, l, b, ...)
+        file_type = self["file_type"].value
+        if file_type in self.file_type_letter:
+            mode[0] = self.file_type_letter[file_type]
+        return "".join(mode)
+
+    def createFields(self):
+        # File mode: permission bits are parsed LSB-first (little endian),
+        # so "other" bits come before "owner" bits
+        yield Bit(self, "other_exec")
+        yield Bit(self, "other_write")
+        yield Bit(self, "other_read")
+        yield Bit(self, "group_exec")
+        yield Bit(self, "group_write")
+        yield Bit(self, "group_read")
+        yield Bit(self, "owner_exec")
+        yield Bit(self, "owner_write")
+        yield Bit(self, "owner_read")
+        yield Bit(self, "sticky")
+        yield Bit(self, "setgid")
+        yield Bit(self, "setuid")
+        yield Enum(Bits(self, "file_type", 4), self.file_type)
+
+        yield UInt16(self, "uid", "User ID")
+        yield UInt32(self, "size", "File size (in bytes)")
+        yield TimestampUnix32(self, "atime", "Last access time")
+        yield TimestampUnix32(self, "ctime", "Creation time")
+        yield TimestampUnix32(self, "mtime", "Last modification time")
+        yield TimestampUnix32(self, "dtime", "Delete time")
+        yield UInt16(self, "gid", "Group ID")
+        yield UInt16(self, "links_count", "Links count")
+        yield UInt32(self, "blocks", "Number of blocks")
+        yield UInt32(self, "flags", "Flags")
+        yield NullBytes(self, "reserved[]", 4, "Reserved")
+        # 12 direct + 1 indirect + 1 double-indirect + 1 triple-indirect
+        for index in xrange(15):
+            yield UInt32(self, "block[]")
+        yield UInt32(self, "version", "Version")
+        yield UInt32(self, "file_acl", "File ACL")
+        yield UInt32(self, "dir_acl", "Directory ACL")
+        yield UInt32(self, "faddr", "Block where the fragment of the file resides")
+
+        # Trailing 12 bytes depend on the OS that created the file system
+        os = self["/superblock/creator_os"].value
+        if os == SuperBlock.OS_LINUX:
+            yield UInt8(self, "frag", "Number of fragments in the block")
+            yield UInt8(self, "fsize", "Fragment size")
+            yield UInt16(self, "padding", "Padding")
+            yield UInt16(self, "uid_high", "High 16 bits of user ID")
+            yield UInt16(self, "gid_high", "High 16 bits of group ID")
+            yield NullBytes(self, "reserved[]", 4, "Reserved")
+        elif os == SuperBlock.OS_HURD:
+            yield UInt8(self, "frag", "Number of fragments in the block")
+            yield UInt8(self, "fsize", "Fragment size")
+            yield UInt16(self, "mode_high", "High 16 bits of mode")
+            yield UInt16(self, "uid_high", "High 16 bits of user ID")
+            yield UInt16(self, "gid_high", "High 16 bits of group ID")
+            yield UInt32(self, "author", "Author ID (?)")
+        else:
+            yield RawBytes(self, "raw", 12, "Reserved")
+
+class Bitmap(FieldSet):
+    def __init__(self, parent, name, start, size, description, **kw):
+        description = "%s: %s items" % (description, size)
+        FieldSet.__init__(self, parent, name, description, size=size, **kw)
+        self.start = 1+start
+
+    def createFields(self):
+        for index in xrange(self._size):
+            yield Bit(self, "item[]", "Item %s" % (self.start+index))
+
+BlockBitmap = Bitmap
+InodeBitmap = Bitmap
+
+class GroupDescriptor(FieldSet):
+    """Descriptor of one block group: where its bitmaps and inode table live."""
+    static_size = 32*8
+
+    def __init__(self, parent, name, index):
+        FieldSet.__init__(self, parent, name)
+        # 0-based group number
+        self.uniq_id = index
+
+    def createDescription(self):
+        # Block range covered by this group
+        blocks_per_group = self["/superblock/blocks_per_group"].value
+        start = self.uniq_id * blocks_per_group
+        end = start + blocks_per_group
+        return "Group descriptor: blocks %s-%s" % (start, end)
+
+    def createFields(self):
+        yield UInt32(self, "block_bitmap", "Points to the blocks bitmap block")
+        yield UInt32(self, "inode_bitmap", "Points to the inodes bitmap block")
+        yield UInt32(self, "inode_table", "Points to the inodes table first block")
+        yield UInt16(self, "free_blocks_count", "Number of free blocks")
+        yield UInt16(self, "free_inodes_count", "Number of free inodes")
+        yield UInt16(self, "used_dirs_count", "Number of inodes allocated to directories")
+        yield UInt16(self, "padding", "Padding")
+        yield NullBytes(self, "reserved", 12, "Reserved")
+
+class SuperBlock(FieldSet):
+    """EXT2/EXT3 superblock: global file-system geometry and feature flags."""
+    static_size = 433*8
+
+    # creator_os values
+    OS_LINUX = 0
+    OS_HURD = 1
+    os_name = {
+        0: "Linux",
+        1: "Hurd",
+        2: "Masix",
+        3: "FreeBSD",
+        4: "Lites",
+        5: "WinNT"
+    }
+    state_desc = {
+        1: "Valid (Unmounted cleanly)",
+        2: "Error (Errors detected)",
+        4: "Orphan FS (Orphans being recovered)",
+    }
+    error_handling_desc = { 1: "Continue" }
+
+    def __init__(self, parent, name):
+        FieldSet.__init__(self, parent, name)
+        # Lazily computed by the group_count property
+        self._group_count = None
+
+    def createDescription(self):
+        # feature_compat bit 2 (value 4) means "has a journal" => ext3
+        if self["feature_compat"].value & 4:
+            fstype = "ext3"
+        else:
+            fstype = "ext2"
+        return "Superblock: %s file system" % fstype
+
+    def createFields(self):
+        yield UInt32(self, "inodes_count", "Inodes count")
+        yield UInt32(self, "blocks_count", "Blocks count")
+        yield UInt32(self, "r_blocks_count", "Reserved blocks count")
+        yield UInt32(self, "free_blocks_count", "Free blocks count")
+        yield UInt32(self, "free_inodes_count", "Free inodes count")
+        yield UInt32(self, "first_data_block", "First data block")
+        yield UInt32(self, "log_block_size", "Block size")
+        yield UInt32(self, "log_frag_size", "Fragment size")
+        yield UInt32(self, "blocks_per_group", "Blocks per group")
+        yield UInt32(self, "frags_per_group", "Fragments per group")
+        yield UInt32(self, "inodes_per_group", "Inodes per group")
+        yield TimestampUnix32(self, "mtime", "Mount time")
+        yield TimestampUnix32(self, "wtime", "Write time")
+        yield UInt16(self, "mnt_count", "Mount count")
+        yield UInt16(self, "max_mnt_count", "Max mount count")
+        yield String(self, "magic", 2, "Magic number (0x53EF)")
+        yield Enum(UInt16(self, "state", "File system state"), self.state_desc)
+        yield Enum(UInt16(self, "errors", "Behaviour when detecting errors"), self.error_handling_desc)
+        yield UInt16(self, "minor_rev_level", "Minor revision level")
+        yield TimestampUnix32(self, "last_check", "Time of last check")
+        yield textHandler(UInt32(self, "check_interval", "Maximum time between checks"), self.postMaxTime)
+        yield Enum(UInt32(self, "creator_os", "Creator OS"), self.os_name)
+        yield UInt32(self, "rev_level", "Revision level")
+        yield UInt16(self, "def_resuid", "Default uid for reserved blocks")
+        yield UInt16(self, "def_resgid", "Default gid for reserved blocks")
+        yield UInt32(self, "first_ino", "First non-reserved inode")
+        yield UInt16(self, "inode_size", "Size of inode structure")
+        yield UInt16(self, "block_group_nr", "Block group # of this superblock")
+        yield UInt32(self, "feature_compat", "Compatible feature set")
+        yield UInt32(self, "feature_incompat", "Incompatible feature set")
+        yield UInt32(self, "feature_ro_compat", "Read-only compatible feature set")
+        yield RawBytes(self, "uuid", 16, "128-bit uuid for volume")
+        yield String(self, "volume_name", 16, "Volume name", strip="\0")
+        yield String(self, "last_mounted", 64, "Directory where last mounted", strip="\0")
+        yield UInt32(self, "compression", "For compression (algorithm usage bitmap)")
+        yield UInt8(self, "prealloc_blocks", "Number of blocks to try to preallocate")
+        yield UInt8(self, "prealloc_dir_blocks", "Number to preallocate for directories")
+        yield UInt16(self, "padding", "Padding")
+        yield String(self, "journal_uuid", 16, "uuid of journal superblock")
+        yield UInt32(self, "journal_inum", "inode number of journal file")
+        yield UInt32(self, "journal_dev", "device number of journal file")
+        yield UInt32(self, "last_orphan", "start of list of inodes to delete")
+        yield RawBytes(self, "reserved", 197, "Reserved")
+
+    def _getGroupCount(self):
+        """Number of block groups (ceiling division, cached after first call)."""
+        if self._group_count is None:
+            # Calculate number of groups
+            blocks_per_group = self["blocks_per_group"].value
+            self._group_count = (self["blocks_count"].value - self["first_data_block"].value + (blocks_per_group - 1)) / blocks_per_group
+        return self._group_count
+    group_count = property(_getGroupCount)
+
+    def postMaxTime(self, chunk):
+        # check_interval is in seconds; humanDuration() expects milliseconds
+        return humanDuration(chunk.value * 1000)
+
+class GroupDescriptors(FieldSet):
+    def __init__(self, parent, name, count):
+        FieldSet.__init__(self, parent, name)
+        self.count = count
+
+    def createDescription(self):
+        return "Group descriptors: %s items" % self.count
+
+    def createFields(self):
+        for index in range(0, self.count):
+            yield GroupDescriptor(self, "group[]", index)
+
+class InodeTable(FieldSet):
+    def __init__(self, parent, name, start, count):
+        FieldSet.__init__(self, parent, name)
+        self.start = start
+        self.count = count
+        self._size = self.count * self["/superblock/inode_size"].value * 8
+
+    def createDescription(self):
+        return "Group descriptors: %s items" % self.count
+
+    def createFields(self):
+        for index in range(self.start, self.start+self.count):
+            yield Inode(self, "inode[]", index)
+
+class Group(FieldSet):
+    """One EXT2 block group: optional superblock copy, bitmaps, inode table
+    and data blocks."""
+    def __init__(self, parent, name, index):
+        FieldSet.__init__(self, parent, name)
+        # 0-based group number
+        self.uniq_id = index
+
+    def createDescription(self):
+        desc = "Group %s: %s" % (self.uniq_id, humanFilesize(self.size/8))
+        if "superblock_copy" in self:
+            desc += " (with superblock copy)"
+        return desc
+
+    def createFields(self):
+        group = self["../group_desc/group[%u]" % self.uniq_id]
+        superblock = self["/superblock"]
+        block_size = self["/"].block_size
+
+        # Detect an optional superblock backup copy at the start of the
+        # group by probing for the 0x53EF magic at byte offset 56
+        addr = self.absolute_address + 56*8
+        self.superblock_copy = (self.stream.readBytes(addr, 2) == "\x53\xEF")
+        if self.superblock_copy:
+            yield SuperBlock(self, "superblock_copy")
+
+        # Compute number of block and inodes
+        block_count = superblock["blocks_per_group"].value
+        inode_count = superblock["inodes_per_group"].value
+        block_index = self.uniq_id * block_count
+        inode_index = self.uniq_id * inode_count
+        # Counts must be byte-aligned since the bitmaps are read bit by bit
+        if (block_count % 8) != 0:
+            raise ParserError("Invalid block count")
+        if (inode_count % 8) != 0:
+            raise ParserError("Invalid inode count")
+        # The last group may be smaller than a full group
+        block_count = min(block_count, superblock["blocks_count"].value - block_index)
+        inode_count = min(inode_count, superblock["inodes_count"].value - inode_index)
+
+        # Read block bitmap
+        field = self.seekByte(group["block_bitmap"].value * block_size, relative=False, null=True)
+        if field:
+            yield field
+        yield BlockBitmap(self, "block_bitmap", block_index, block_count, "Block bitmap")
+
+        # Read inode bitmap
+        field = self.seekByte(group["inode_bitmap"].value * block_size, relative=False)
+        if field:
+            yield field
+        yield InodeBitmap(self, "inode_bitmap", inode_index, inode_count, "Inode bitmap")
+
+        # Read inode table (aligned to the next block boundary)
+        field = self.seekByte(alignValue(self.current_size//8, block_size))
+        if field:
+            yield field
+        yield InodeTable(self, "inode_table", inode_index, inode_count)
+
+        # Add padding if needed: skip to the end of the group (or the end of
+        # the parent, whichever comes first)
+        addr = min(self.parent.size / 8,
+            (self.uniq_id+1) * superblock["blocks_per_group"].value * block_size)
+        yield self.seekByte(addr, "data", relative=False)
+
+class EXT2_FS(Parser):
+    """
+    Parse an EXT2 or EXT3 partition.
+
+    Attributes:
+       * block_size: Size of a block (in bytes)
+
+    Fields:
+       * superblock: Most important block, store most important informations
+       * ...
+    """
+    PARSER_TAGS = {
+        "id": "ext2",
+        "category": "file_system",
+        "description": "EXT2/EXT3 file system",
+        "min_size": (1024*2)*8,
+        "magic": (
+            # (magic, state=valid)
+            ("\x53\xEF\1\0", 1080*8),
+            # (magic, state=error)
+            ("\x53\xEF\2\0", 1080*8),
+            # (magic, state=error)
+            ("\x53\xEF\4\0", 1080*8),
+        ),
+    }
+    endian = LITTLE_ENDIAN
+
+    def validate(self):
+        if self.stream.readBytes((1024+56)*8, 2) != "\x53\xEF":
+            return "Invalid magic number"
+        if not(0 <= self["superblock/log_block_size"].value <= 2):
+            return "Invalid (log) block size"
+        if self["superblock/inode_size"].value != (68 + 15*4):
+            return "Unsupported inode size"
+        return True
+
+    def createFields(self):
+        # Skip something (what is stored here? MBR?)
+        yield NullBytes(self, "padding[]", 1024)
+
+        # Read superblock
+        superblock = SuperBlock(self, "superblock")
+        yield superblock
+        if not(0 <= self["superblock/log_block_size"].value <= 2):
+            raise ParserError("EXT2: Invalid (log) block size")
+        self.block_size = 1024 << superblock["log_block_size"].value # in bytes
+
+        # Read groups' descriptor
+        field = self.seekByte(((1023 + superblock.size/8) / self.block_size + 1) * self.block_size, null=True)
+        if field:
+            yield field
+        groups = GroupDescriptors(self, "group_desc", superblock.group_count)
+        yield groups
+
+        # Read groups
+        address = groups["group[0]/block_bitmap"].value * self.block_size
+        field = self.seekByte(address, null=True)
+        if field:
+            yield field
+        for index in range(0, superblock.group_count):
+            yield Group(self, "group[]", index)
+
+    def getSuperblock(self):
+        # FIXME: Use superblock copy if main superblock is invalid
+        return self["superblock"]
+
+    def createDescription(self):
+        superblock = self.getSuperblock()
+        block_size = 1024 << superblock["log_block_size"].value
+        nb_block = superblock["blocks_count"].value
+        total = nb_block * block_size
+        used = (superblock["free_blocks_count"].value) * block_size
+        desc = "EXT2/EXT3"
+        if "group[0]/inode_table/inode[7]/blocks" in self:
+            if 0 < self["group[0]/inode_table/inode[7]/blocks"].value:
+                desc = "EXT3"
+            else:
+                desc = "EXT2"
+        return desc + " file system: total=%s, used=%s, block=%s" % (
+            humanFilesize(total), humanFilesize(used),
+            humanFilesize(block_size))
+
+
diff --git a/lib/hachoir_parser/file_system/fat.py b/lib/hachoir_parser/file_system/fat.py
new file mode 100644
index 0000000000000000000000000000000000000000..2aebe175f538a59abfb82c989e3a10148656988c
--- /dev/null
+++ b/lib/hachoir_parser/file_system/fat.py
@@ -0,0 +1,433 @@
+from hachoir_core.compatibility import sorted
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, StaticFieldSet,
+    RawBytes, PaddingBytes, createPaddingField, Link, Fragment,
+    Bit, Bits, UInt8, UInt16, UInt32,
+    String, Bytes, NullBytes)
+from hachoir_core.field.integer import GenericInteger
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal
+from hachoir_core.error import error
+from hachoir_core.tools import humanFilesize, makePrintable
+import datetime
+import re
+
+strip_index = re.compile(r'\[[^]]+]$')
+
+
+class Boot(FieldSet):
+    """FAT boot sector: jump instruction, BIOS Parameter Block and boot code."""
+    static_size = 512*8
+    def createFields(self):
+        yield Bytes(self, "jmp", 3, "Jump instruction (to skip over header on boot)")
+        yield Bytes(self, "oem_name", 8, "OEM Name (padded with spaces)")
+        yield UInt16(self, "sector_size", "Bytes per sector")
+        yield UInt8 (self, "cluster_size", "Sectors per cluster")
+        yield UInt16(self, "reserved_sectors", "Reserved sector count (including boot sector)")
+        yield UInt8 (self, "fat_nb", "Number of file allocation tables")
+        yield UInt16(self, "max_root", "Maximum number of root directory entries")
+        yield UInt16(self, "sectors1", "Total sectors (if zero, use 'sectors2')")
+        yield UInt8 (self, "media_desc", "Media descriptor")
+        yield UInt16(self, "fat_size", "Sectors per FAT")
+        yield UInt16(self, "track_size", "Sectors per track")
+        yield UInt16(self, "head_nb", "Number of heads")
+        yield UInt32(self, "hidden", "Hidden sectors")
+        yield UInt32(self, "sectors2", "Total sectors (if greater than 65535)")
+        # FAT32 extends the BPB with FAT size, root cluster and FS info sector
+        if self.parent.version == 32:
+            yield UInt32(self, "fat32_size", "Sectors per FAT")
+            yield UInt16(self, "fat_flags", "FAT Flags")
+            yield UInt16(self, "version", "Version")
+            yield UInt32(self, "root_start", "Cluster number of root directory start")
+            yield UInt16(self, "inf_sector", "Sector number of FS Information Sector")
+            yield UInt16(self, "boot_copy", "Sector number of a copy of this boot sector")
+            yield NullBytes(self, "reserved[]", 12, "Reserved")
+        yield UInt8(self, "phys_drv", "Physical drive number")
+        yield NullBytes(self, "reserved[]", 1, 'Reserved ("current head")')
+        yield UInt8(self, "sign", "Signature")
+        yield textHandler(UInt32(self, "serial", "ID (serial number)"), hexadecimal)
+        yield String(self, "label", 11, "Volume Label", strip=' ', charset="ASCII")
+        yield String(self, "fs_type", 8, "FAT file system type", strip=' ', charset="ASCII")
+        # Boot code fills the rest of the sector up to the trailing signature
+        yield Bytes(self, "code", 510-self.current_size/8, "Operating system boot code")
+        yield Bytes(self, "trail_sig", 2, "Signature (0x55 0xAA)")
+
+
+class FSInfo(StaticFieldSet):
+    """FAT32 FS Information Sector: free cluster count and next-free hints."""
+    format = (
+        (String, "lead_sig", 4, 'Signature ("RRaA")'),
+        (NullBytes,  "reserved[]", 480),
+        (String, "struct_sig", 4, 'Signature ("rrAa")'),
+        (UInt32, "free_count", "Last known free cluster count on the volume"),
+        (UInt32, "nxt_free",),
+        (NullBytes,  "reserved[]", 12),
+        (Bytes,  "trail_sig", 4, "Signature (0x00 0x00 0x55 0xAA)")
+    )
+
+
+class FAT(FieldSet):
+    """The File Allocation Table itself, split into groups of entries."""
+    class FAT(FieldSet):
+        """One group of FAT entries (see the outer class for the grouping)."""
+        def createFields(self):
+            parent = self.parent
+            version = parent.parent.version
+            text_handler = parent.text_handler
+            # Each entry is `version` bits wide (12/16/32)
+            while self.current_size < self._size:
+                yield textHandler(GenericInteger(self, 'entry[]', False, version), text_handler)
+    def createFields(self):
+        version = self.parent.version
+        # FAT32 actually uses only 28 bits per entry
+        max_entry = 1 << min(28, version)
+        def FatEntry(chunk):
+            # Decode special values relative to the top of the entry range:
+            # j is the distance of the value from max_entry+1 (mod max_entry)
+            i = chunk.value
+            j = (1 - i) % max_entry
+            if j == 0:
+                return "reserved cluster"
+            elif j == 1:
+                return "free cluster"
+            elif j < 10:
+                return "end of a chain"
+            elif j == 10:
+                return "bad cluster"
+            elif j < 18:
+                return "reserved value"
+            else:
+                return str(i)
+        self.text_handler = FatEntry
+        # Chunk the table into groups of 1000 entries (clusters() relies on
+        # this grouping for its fat[n/1000][n%1000] lookups)
+        while self.current_size < self._size:
+            yield FAT.FAT(self, 'group[]', size=min(1000*version,self._size-self.current_size))
+
+
+class Date(FieldSet):
+    """DOS timestamp of a directory entry.
+
+    Size depends on which stamp it is: "access" stores only a date (2 bytes),
+    "modify" adds a time (4 bytes), "create" adds 10ms units too (5 bytes).
+    """
+    def __init__(self, parent, name):
+        FieldSet.__init__(self, parent, name, size={
+            "create": 5,
+            "access": 2,
+            "modify": 4,
+        }[name] * 8)
+
+    def createFields(self):
+        size = self.size / 8
+        if size > 2:
+            if size > 4:
+                yield UInt8(self, "cs", "10ms units, values from 0 to 199")
+            yield Bits(self, "2sec", 5, "seconds/2")
+            yield Bits(self, "min", 6, "minutes")
+            yield Bits(self, "hour", 5, "hours")
+        yield Bits(self, "day", 5, "(1-31)")
+        yield Bits(self, "month", 4, "(1-12)")
+        yield Bits(self, "year", 7, "(0 = 1980, 127 = 2107)")
+
+    def createDescription(self):
+        date = [ self["year"].value, self["month"].value, self["day"].value ]
+        size = self.size / 8
+        if size > 2:
+            mkdate = datetime.datetime
+            # Total centiseconds: "2sec" counts 2-second units (=200 cs each)
+            cs = 200 * self["2sec"].value
+            if size > 4:
+                cs += self["cs"].value
+            # Append hour, minute, whole seconds and microseconds
+            date += [ self["hour"].value, self["min"].value, cs / 100, cs % 100 * 10000 ]
+        else:
+            mkdate = datetime.date
+        # All-zero fields mean "no date set"
+        if date == [ 0 for i in date ]:
+            date = None
+        else:
+            date[0] += 1980
+            try:
+                date = mkdate(*tuple(date))
+            except ValueError:
+                return "invalid"
+        return str(date)
+
+
+class InodeLink(Link):
+    """Link field pointing to the first data fragment of a file/directory."""
+    def __init__(self, parent, name, target=None):
+        Link.__init__(self, parent, name)
+        self.target = target
+        self.first = None
+
+    def _getTargetPath(self):
+        if not self.target:
+            # Build the target path from the parent directory's name (index
+            # suffix stripped) plus this entry's file name.
+            # NOTE(review): the replacement string r"\\" inserts a literal
+            # backslash where the "[...]" suffix was -- confirm intended.
+            parent = self.parent
+            self.target = strip_index.sub(r"\\", parent.parent._name) + parent.getFilename().rstrip("/")
+        return self.target
+
+    def createValue(self):
+        # Instantiate the fragment chain on demand and link to it
+        field = InodeGen(self["/"], self.parent, self._getTargetPath())(self)
+        if field:
+            self._display = field.path
+            return Link.createValue(self)
+
+    def createDisplay(self):
+        return "/%s[0]" % self._getTargetPath()
+
+
+class FileEntry(FieldSet):
+    static_size = 32*8
+    process = False
+    LFN = False
+
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+        self.status = self.stream.readBits(self.absolute_address, 8, LITTLE_ENDIAN)
+        if self.status in (0, 0xE5):
+            return
+
+        magic = self.stream.readBits(self.absolute_address+11*8, 8, LITTLE_ENDIAN)
+        if magic & 0x3F == 0x0F:
+            self.LFN = True
+        elif self.getFilename() not in (".", ".."):
+            self.process = True
+
+    def getFilename(self):
+        name = self["name"].value
+        if isinstance(name, str):
+            name = makePrintable(name, "ASCII", to_unicode=True)
+        ext = self["ext"].value
+        if ext:
+            name += "." + ext
+        if name[0] == 5:
+            name = "\xE5" + name[1:]
+        if not self.LFN and self["directory"].value:
+            name += "/"
+        return name
+
+    def createDescription(self):
+        if self.status == 0:
+            return "Free entry"
+        elif self.status == 0xE5:
+            return "Deleted file"
+        elif self.LFN:
+            name = "".join( field.value for field in self.array("name") )
+            try:
+                name = name[:name.index('\0')]
+            except ValueError:
+                pass
+            seq_no = self["seq_no"].value
+            return "Long filename part: '%s' [%u]" % (name, seq_no)
+        else:
+            return "File: '%s'" % self.getFilename()
+
+    def getCluster(self):
+        cluster = self["cluster_lo"].value
+        if self.parent.parent.version > 16:
+            cluster += self["cluster_hi"].value << 16
+        return cluster
+
+    def createFields(self):
+        if not self.LFN:
+            yield String(self, "name", 8, "DOS file name (padded with spaces)",
+                strip=' ', charset="ASCII")
+            yield String(self, "ext", 3, "DOS file extension (padded with spaces)",
+                strip=' ', charset="ASCII")
+            yield Bit(self, "read_only")
+            yield Bit(self, "hidden")
+            yield Bit(self, "system")
+            yield Bit(self, "volume_label")
+            yield Bit(self, "directory")
+            yield Bit(self, "archive")
+            yield Bit(self, "device")
+            yield Bit(self, "unused")
+            yield RawBytes(self, "reserved", 1, "Something about the case")
+            yield Date(self, "create")
+            yield Date(self, "access")
+            if self.parent.parent.version > 16:
+                yield UInt16(self, "cluster_hi")
+            else:
+                yield UInt16(self, "ea_index")
+            yield Date(self, "modify")
+            yield UInt16(self, "cluster_lo")
+            size = UInt32(self, "size")
+            yield size
+            if self.process:
+                del self.process
+                target_size = size.value
+                if self["directory"].value:
+                    if target_size:
+                        size.error("(FAT) value must be zero")
+                        target_size = 0
+                elif not target_size:
+                    return
+                self.target_size = 8 * target_size
+                yield InodeLink(self, "data")
+        else:
+            yield UInt8(self, "seq_no", "Sequence Number")
+            yield String(self, "name[]", 10, "(5 UTF-16 characters)",
+                charset="UTF-16-LE")
+            yield UInt8(self, "magic", "Magic number (15)")
+            yield NullBytes(self, "reserved", 1, "(always 0)")
+            yield UInt8(self, "checksum", "Checksum of DOS file name")
+            yield String(self, "name[]", 12, "(6 UTF-16 characters)",
+                charset="UTF-16-LE")
+            yield UInt16(self, "first_cluster", "(always 0)")
+            yield String(self, "name[]",  4, "(2 UTF-16 characters)",
+                charset="UTF-16-LE")
+
+class Directory(Fragment):
+    """Fragment of a directory: a run of 32-byte FileEntry records."""
+    def createFields(self):
+        while self.current_size < self._size:
+            yield FileEntry(self, "entry[]")
+
+class File(Fragment):
+    """Fragment of a regular file's content (one run of clusters)."""
+    def _getData(self):
+        return self["data"]
+    def createFields(self):
+        # datasize (in bits) is assigned by InodeGen before parsing
+        yield Bytes(self, "data", self.datasize/8)
+        # Slack space between end of data and end of the cluster run
+        padding = self._size - self.current_size
+        if padding:
+            yield createPaddingField(self, padding)
+
+class InodeGen:
+    """Callable that materializes the fragment chain (File or Directory
+    pieces) of one directory entry, patching each fragment into the padding
+    field that covers its clusters."""
+    def __init__(self, root, entry, path):
+        self.root = root
+        # Generator of (address, size, last) cluster runs for this entry
+        self.cluster = root.clusters(entry.getCluster)
+        self.path = path
+        # Expected file size in bits (0 for directories)
+        self.filesize = entry.target_size
+        self.done = 0
+        def createInputStream(cis, **args):
+            # Substream limited to the file size, tagged with the filename
+            args["size"] = self.filesize
+            args.setdefault("tags",[]).append(("filename", entry.getFilename()))
+            return cis(**args)
+        self.createInputStream = createInputStream
+
+    def __call__(self, prev):
+        name = self.path + "[]"
+        address, size, last = self.cluster.next()
+        if self.filesize:
+            if self.done >= self.filesize:
+                # Cluster chain longer than the declared file size
+                error("(FAT) bad metadata for " + self.path)
+                return
+            field = File(self.root, name, size=size)
+            if prev.first is None:
+                # First fragment carries the description and the substream
+                field._description = 'File size: %s' % humanFilesize(self.filesize//8)
+                field.setSubIStream(self.createInputStream)
+            field.datasize = min(self.filesize - self.done, size)
+            self.done += field.datasize
+        else:
+            field = Directory(self.root, name, size=size)
+        # The fragment must land inside an unparsed padding area
+        padding = self.root.getFieldByAddress(address, feed=False)
+        if not isinstance(padding, (PaddingBytes, RawBytes)):
+            error("(FAT) address %u doesn't point to a padding field" % address)
+            return
+        if last:
+            next = None
+        else:
+            # Lazily continue the chain when the next fragment is requested
+            next = lambda: self(field)
+        field.setLinks(prev.first, next)
+        self.root.writeFieldsIn(padding, address, (field,))
+        return field
+
+
+class FAT_FS(Parser):
+    endian = LITTLE_ENDIAN
+    PARSER_TAGS = {
+        "category": "file_system",
+        "min_size": 512*8,
+        "file_ext": ("",),
+    }
+
+    def _validate(self, type_offset):
+        if self.stream.readBytes(type_offset*8, 8) != ("FAT%-5u" % self.version):
+            return "Invalid FAT%u signature" % self.version
+        if self.stream.readBytes(510*8, 2) != "\x55\xAA":
+            return "Invalid BIOS signature"
+        return True
+
+    def clusters(self, cluster_func):
+        max_entry = (1 << min(28, self.version)) - 16
+        cluster = cluster_func()
+        if 1 < cluster < max_entry:
+            clus_nb = 1
+            next = cluster
+            while True:
+                next = self.fat[next/1000][next%1000].value
+                if not 1 < next < max_entry:
+                    break
+                if cluster + clus_nb == next:
+                    clus_nb += 1
+                else:
+                    yield self.data_start + cluster * self.cluster_size, clus_nb * self.cluster_size, False
+                    cluster = next
+                    clus_nb = 1
+            yield self.data_start + cluster * self.cluster_size, clus_nb * self.cluster_size, True
+
+    def createFields(self):
+        # Read boot sector
+        boot = Boot(self, "boot", "Boot sector")
+        yield boot
+        self.sector_size = boot["sector_size"].value
+
+        if self.version == 32:
+            for field in sorted((
+                (boot["inf_sector"].value, lambda: FSInfo(self, "fsinfo")),
+                (boot["boot_copy"].value, lambda: Boot(self, "bkboot", "Copy of the boot sector")),
+            )):
+                if field[0]:
+                    padding = self.seekByte(field[0] * self.sector_size)
+                    if padding:
+                        yield padding
+                    yield field[1]()
+        padding = self.seekByte(boot["reserved_sectors"].value * self.sector_size)
+        if padding:
+            yield padding
+
+        # Read the two FAT
+        fat_size = boot["fat_size"].value
+        if fat_size == 0:
+            fat_size = boot["fat32_size"].value
+        fat_size *= self.sector_size * 8
+        for i in xrange(boot["fat_nb"].value):
+            yield FAT(self, "fat[]", "File Allocation Table", size=fat_size)
+
+        # Read inode table (Directory)
+        self.cluster_size = boot["cluster_size"].value * self.sector_size * 8
+        self.fat = self["fat[0]"]
+        if "root_start" in boot:
+            self.target_size = 0
+            self.getCluster = lambda: boot["root_start"].value
+            yield InodeLink(self, "root", "root")
+        else:
+            yield Directory(self, "root[]", size=boot["max_root"].value * 32 * 8)
+        self.data_start = self.current_size - 2 * self.cluster_size
+        sectors = boot["sectors1"].value
+        if not sectors:
+            sectors = boot["sectors2"].value
+
+        # Create one big padding field for the end
+        size = sectors * self.sector_size
+        if self._size:
+            size = min(size, self.size//8)
+        padding = self.seekByte(size)
+        if padding:
+            yield padding
+
+
+class FAT12(FAT_FS):
+    PARSER_TAGS = {
+        "id": "fat12",
+        "description": "FAT12 filesystem",
+        "magic": (("FAT12   ", 54*8),),
+    }
+    version = 12
+
+    def validate(self):
+        return FAT_FS._validate(self, 54)
+
+
+class FAT16(FAT_FS):
+    PARSER_TAGS = {
+        "id": "fat16",
+        "description": "FAT16 filesystem",
+        "magic": (("FAT16   ", 54*8),),
+    }
+    version = 16
+
+    def validate(self):
+        return FAT_FS._validate(self, 54)
+
+
+class FAT32(FAT_FS):
+    PARSER_TAGS = {
+        "id": "fat32",
+        "description": "FAT32 filesystem",
+        "magic": (("FAT32   ", 82*8),),
+    }
+    version = 32
+
+    def validate(self):
+        return FAT_FS._validate(self, 82)
diff --git a/lib/hachoir_parser/file_system/iso9660.py b/lib/hachoir_parser/file_system/iso9660.py
new file mode 100644
index 0000000000000000000000000000000000000000..3d93593a9d4b5a81ce1a79f3ebb4345024126867
--- /dev/null
+++ b/lib/hachoir_parser/file_system/iso9660.py
@@ -0,0 +1,121 @@
+"""
+ISO 9660 (cdrom) file system parser.
+
+Documents:
+- Standard ECMA-119 (december 1987)
+  http://www.nondot.org/sabre/os/files/FileSystems/iso9660.pdf
+
+Author: Victor Stinner
+Creation: 11 july 2006
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, ParserError,
+    UInt8, UInt32, UInt64, Enum,
+    NullBytes, RawBytes, String)
+from hachoir_core.endian import LITTLE_ENDIAN, BIG_ENDIAN
+
+class PrimaryVolumeDescriptor(FieldSet):
+    static_size = 2041*8
+    def createFields(self):
+        yield NullBytes(self, "unused[]", 1)
+        yield String(self, "system_id", 32, "System identifier", strip=" ")
+        yield String(self, "volume_id", 32, "Volume identifier", strip=" ")
+        yield NullBytes(self, "unused[]", 8)
+        yield UInt64(self, "space_size", "Volume space size")
+        yield NullBytes(self, "unused[]", 32)
+        yield UInt32(self, "set_size", "Volume set size")
+        yield UInt32(self, "seq_num", "Sequence number")
+        yield UInt32(self, "block_size", "Block size")
+        yield UInt64(self, "path_table_size", "Path table size")
+        yield UInt32(self, "occu_lpath", "Location of Occurrence of Type L Path Table")
+        yield UInt32(self, "opt_lpath", "Location of Optional Type L Path Table")
+        yield UInt32(self, "occu_mpath", "Location of Occurrence of Type M Path Table")
+        yield UInt32(self, "opt_mpath", "Location of Optional Type M Path Table")
+        yield RawBytes(self, "root", 34, "Directory Record for Root Directory")
+        yield String(self, "vol_set_id", 128, "Volume set identifier", strip=" ")
+        yield String(self, "publisher", 128, "Publisher identifier", strip=" ")
+        yield String(self, "data_preparer", 128, "Data preparer identifier", strip=" ")
+        yield String(self, "application", 128, "Application identifier", strip=" ")
+        yield String(self, "copyright", 37, "Copyright file identifier", strip=" ")
+        yield String(self, "abstract", 37, "Abstract file identifier", strip=" ")
+        yield String(self, "biographic", 37, "Biographic file identifier", strip=" ")
+        yield String(self, "creation_ts", 17, "Creation date and time", strip=" ")
+        yield String(self, "modification_ts", 17, "Modification date and time", strip=" ")
+        yield String(self, "expiration_ts", 17, "Expiration date and time", strip=" ")
+        yield String(self, "effective_ts", 17, "Effective date and time", strip=" ")
+        yield UInt8(self, "struct_ver", "Structure version")
+        yield NullBytes(self, "unused[]", 1)
+        yield String(self, "app_use", 512, "Application use", strip=" \0")
+        yield NullBytes(self, "unused[]", 653)
+
+class BootRecord(FieldSet):
+    static_size = 2041*8
+    def createFields(self):
+        yield String(self, "sys_id", 31, "Boot system identifier", strip="\0")
+        yield String(self, "boot_id", 31, "Boot identifier", strip="\0")
+        yield RawBytes(self, "system_use", 1979, "Boot system use")
+
+class Terminator(FieldSet):
+    static_size = 2041*8
+    def createFields(self):
+        yield NullBytes(self, "null", 2041)
+
+class Volume(FieldSet):
+    endian = BIG_ENDIAN
+    TERMINATOR = 255
+    type_name = {
+        0: "Boot Record",
+        1: "Primary Volume Descriptor",
+        2: "Supplementary Volume Descriptor",
+        3: "Volume Partition Descriptor",
+        TERMINATOR: "Volume Descriptor Set Terminator",
+    }
+    static_size = 2048 * 8
+    content_handler = {
+        0: BootRecord,
+        1: PrimaryVolumeDescriptor,
+        TERMINATOR: Terminator,
+    }
+
+    def createFields(self):
+        yield Enum(UInt8(self, "type", "Volume descriptor type"), self.type_name)
+        yield RawBytes(self, "signature", 5, "ISO 9660 signature (CD001)")
+        if self["signature"].value != "CD001":
+            raise ParserError("Invalid ISO 9660 volume signature")
+        yield UInt8(self, "version", "Volume descriptor version")
+        cls = self.content_handler.get(self["type"].value, None)
+        if cls:
+            yield cls(self, "content")
+        else:
+            yield RawBytes(self, "raw_content", 2048-7)
+
+class ISO9660(Parser):
+    endian = LITTLE_ENDIAN
+    MAGIC = "\x01CD001"
+    NULL_BYTES = 0x8000
+    PARSER_TAGS = {
+        "id": "iso9660",
+        "category": "file_system",
+        "description": "ISO 9660 file system",
+        "min_size": (NULL_BYTES + 6)*8,
+        "magic": ((MAGIC, NULL_BYTES*8),),
+    }
+
+    def validate(self):
+        if self.stream.readBytes(self.NULL_BYTES*8, len(self.MAGIC)) != self.MAGIC:
+            return "Invalid signature"
+        return True
+
+    def createFields(self):
+        yield self.seekByte(self.NULL_BYTES, null=True)
+
+        while True:
+            volume = Volume(self, "volume[]")
+            yield volume
+            if volume["type"].value == Volume.TERMINATOR:
+                break
+
+        if self.current_size < self._size:
+            yield self.seekBit(self._size, "end")
+
diff --git a/lib/hachoir_parser/file_system/linux_swap.py b/lib/hachoir_parser/file_system/linux_swap.py
new file mode 100644
index 0000000000000000000000000000000000000000..ea2e0dba0b94a670cff2ea47eae4ae14b58f8049
--- /dev/null
+++ b/lib/hachoir_parser/file_system/linux_swap.py
@@ -0,0 +1,114 @@
+"""
+Linux swap file.
+
+Documentation: Linux kernel source code, files:
+ - mm/swapfile.c
+ - include/linux/swap.h
+
+Author: Victor Stinner
+Creation date: 25 december 2006 (christmas ;-))
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (ParserError, GenericVector,
+    UInt32, String,
+    Bytes, NullBytes, RawBytes)
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.tools import humanFilesize
+from hachoir_core.bits import str2hex
+
+PAGE_SIZE = 4096
+
+# Definition of MAX_SWAP_BADPAGES in Linux kernel:
+#  (__swapoffset(magic.magic) - __swapoffset(info.badpages)) / sizeof(int)
+MAX_SWAP_BADPAGES = ((PAGE_SIZE - 10) - 1536) // 4
+
+class Page(RawBytes):
+    static_size = PAGE_SIZE*8
+    def __init__(self, parent, name):
+        RawBytes.__init__(self, parent, name, PAGE_SIZE)
+
+class UUID(Bytes):
+    static_size = 16*8
+    def __init__(self, parent, name):
+        Bytes.__init__(self, parent, name, 16)
+    def createDisplay(self):
+        text = str2hex(self.value, format=r"%02x")
+        return "%s-%s-%s-%s-%s" % (
+            text[:8], text[8:12], text[12:16], text[16:20], text[20:])
+
+class LinuxSwapFile(Parser):
+    PARSER_TAGS = {
+        "id": "linux_swap",
+        "file_ext": ("",),
+        "category": "file_system",
+        "min_size": PAGE_SIZE*8,
+        "description": "Linux swap file",
+        "magic": (
+            ("SWAP-SPACE", (PAGE_SIZE-10)*8),
+            ("SWAPSPACE2", (PAGE_SIZE-10)*8),
+            ("S1SUSPEND\0", (PAGE_SIZE-10)*8),
+        ),
+    }
+    endian = LITTLE_ENDIAN
+
+    def validate(self):
+        magic = self.stream.readBytes((PAGE_SIZE-10)*8, 10)
+        if magic not in ("SWAP-SPACE", "SWAPSPACE2", "S1SUSPEND\0"):
+            return "Unknown magic string"
+        if MAX_SWAP_BADPAGES < self["nb_badpage"].value:
+            return "Invalid number of bad page (%u)" % self["nb_badpage"].value
+        return True
+
+    def getPageCount(self):
+        """
+        Number of pages which can really be used for swapping:
+        number of pages minus bad pages minus one page (used for the header)
+        """
+        # -1 because first page is used for the header
+        return self["last_page"].value - self["nb_badpage"].value - 1
+
+    def createDescription(self):
+        if self["magic"].value == "S1SUSPEND\0":
+            text = "Suspend swap file version 1"
+        elif self["magic"].value == "SWAPSPACE2":
+            text = "Linux swap file version 2"
+        else:
+            text = "Linux swap file version 1"
+        nb_page = self.getPageCount()
+        return "%s, page size: %s, %s pages" % (
+            text, humanFilesize(PAGE_SIZE), nb_page)
+
+    def createFields(self):
+        # First kilobyte: boot sectors
+        yield RawBytes(self, "boot", 1024, "Space for disklabel etc.")
+
+        # Header
+        yield UInt32(self, "version")
+        yield UInt32(self, "last_page")
+        yield UInt32(self, "nb_badpage")
+        yield UUID(self, "sws_uuid")
+        yield UUID(self, "sws_volume")
+        yield NullBytes(self, "reserved", 117*4)
+
+        # Read bad pages (if any)
+        count = self["nb_badpage"].value
+        if count:
+            if MAX_SWAP_BADPAGES < count:
+                raise ParserError("Invalid number of bad page (%u)" % count)
+            yield GenericVector(self, "badpages", count, UInt32, "badpage")
+
+        # Read magic
+        padding = self.seekByte(PAGE_SIZE - 10, "padding", null=True)
+        if padding:
+            yield padding
+        yield String(self, "magic", 10, charset="ASCII")
+
+        # Read all pages
+        yield GenericVector(self, "pages", self["last_page"].value, Page, "page")
+
+        # Padding at the end
+        padding = self.seekBit(self.size, "end_padding", null=True)
+        if padding:
+            yield padding
+
diff --git a/lib/hachoir_parser/file_system/mbr.py b/lib/hachoir_parser/file_system/mbr.py
new file mode 100644
index 0000000000000000000000000000000000000000..d5c366f89abdb7ee65e7a48b8d8bed9c74d093c0
--- /dev/null
+++ b/lib/hachoir_parser/file_system/mbr.py
@@ -0,0 +1,230 @@
+"""
+Master Boot Record.
+
+
+"""
+
+# cfdisk uses the following algorithm to compute the geometry:
+# 0. Use the values given by the user.
+# 1. Try to guess the geometry from the partition table:
+#    if all the used partitions end at the same head H and the
+#    same sector S, then there are (H+1) heads and S sectors/cylinder.
+# 2. Ask the system (ioctl/HDIO_GETGEO).
+# 3. 255 heads and 63 sectors/cylinder.
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    Enum, Bits, UInt8, UInt16, UInt32,
+    RawBytes)
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.tools import humanFilesize
+from hachoir_core.text_handler import textHandler, hexadecimal
+
+BLOCK_SIZE = 512  # bytes
+
+class CylinderNumber(Bits):
+    def __init__(self, parent, name, description=None):
+        Bits.__init__(self, parent, name, 10, description)
+
+    def createValue(self):
+        i = self.parent.stream.readInteger(
+            self.absolute_address, False, self._size, self.parent.endian)
+        return i >> 2 | i % 4 << 8
+
+class PartitionHeader(FieldSet):
+    static_size = 16*8
+
+    # taken from the source of cfdisk:
+    # sed -n 's/.*{\(.*\), N_(\(.*\))}.*/        \1: \2,/p' i386_sys_types.c
+    system_name = {
+        0x00: "Empty",
+        0x01: "FAT12",
+        0x02: "XENIX root",
+        0x03: "XENIX usr",
+        0x04: "FAT16 <32M",
+        0x05: "Extended",
+        0x06: "FAT16",
+        0x07: "HPFS/NTFS",
+        0x08: "AIX",
+        0x09: "AIX bootable",
+        0x0a: "OS/2 Boot Manager",
+        0x0b: "W95 FAT32",
+        0x0c: "W95 FAT32 (LBA)",
+        0x0e: "W95 FAT16 (LBA)",
+        0x0f: "W95 Ext'd (LBA)",
+        0x10: "OPUS",
+        0x11: "Hidden FAT12",
+        0x12: "Compaq diagnostics",
+        0x14: "Hidden FAT16 <32M",
+        0x16: "Hidden FAT16",
+        0x17: "Hidden HPFS/NTFS",
+        0x18: "AST SmartSleep",
+        0x1b: "Hidden W95 FAT32",
+        0x1c: "Hidden W95 FAT32 (LBA)",
+        0x1e: "Hidden W95 FAT16 (LBA)",
+        0x24: "NEC DOS",
+        0x39: "Plan 9",
+        0x3c: "PartitionMagic recovery",
+        0x40: "Venix 80286",
+        0x41: "PPC PReP Boot",
+        0x42: "SFS",
+        0x4d: "QNX4.x",
+        0x4e: "QNX4.x 2nd part",
+        0x4f: "QNX4.x 3rd part",
+        0x50: "OnTrack DM",
+        0x51: "OnTrack DM6 Aux1",
+        0x52: "CP/M",
+        0x53: "OnTrack DM6 Aux3",
+        0x54: "OnTrackDM6",
+        0x55: "EZ-Drive",
+        0x56: "Golden Bow",
+        0x5c: "Priam Edisk",
+        0x61: "SpeedStor",
+        0x63: "GNU HURD or SysV",
+        0x64: "Novell Netware 286",
+        0x65: "Novell Netware 386",
+        0x70: "DiskSecure Multi-Boot",
+        0x75: "PC/IX",
+        0x80: "Old Minix",
+        0x81: "Minix / old Linux",
+        0x82: "Linux swap / Solaris",
+        0x83: "Linux (ext2/ext3)",
+        0x84: "OS/2 hidden C: drive",
+        0x85: "Linux extended",
+        0x86: "NTFS volume set",
+        0x87: "NTFS volume set",
+        0x88: "Linux plaintext",
+        0x8e: "Linux LVM",
+        0x93: "Amoeba",
+        0x94: "Amoeba BBT",
+        0x9f: "BSD/OS",
+        0xa0: "IBM Thinkpad hibernation",
+        0xa5: "FreeBSD",
+        0xa6: "OpenBSD",
+        0xa7: "NeXTSTEP",
+        0xa8: "Darwin UFS",
+        0xa9: "NetBSD",
+        0xab: "Darwin boot",
+        0xb7: "BSDI fs",
+        0xb8: "BSDI swap",
+        0xbb: "Boot Wizard hidden",
+        0xbe: "Solaris boot",
+        0xbf: "Solaris",
+        0xc1: "DRDOS/sec (FAT-12)",
+        0xc4: "DRDOS/sec (FAT-16 < 32M)",
+        0xc6: "DRDOS/sec (FAT-16)",
+        0xc7: "Syrinx",
+        0xda: "Non-FS data",
+        0xdb: "CP/M / CTOS / ...",
+        0xde: "Dell Utility",
+        0xdf: "BootIt",
+        0xe1: "DOS access",
+        0xe3: "DOS R/O",
+        0xe4: "SpeedStor",
+        0xeb: "BeOS fs",
+        0xee: "EFI GPT",
+        0xef: "EFI (FAT-12/16/32)",
+        0xf0: "Linux/PA-RISC boot",
+        0xf1: "SpeedStor",
+        0xf4: "SpeedStor",
+        0xf2: "DOS secondary",
+        0xfd: "Linux raid autodetect",
+        0xfe: "LANstep",
+        0xff: "BBT"
+    }
+
+    def createFields(self):
+        yield UInt8(self, "bootable", "Bootable flag (true if equals to 0x80)")
+        if self["bootable"].value not in (0x00, 0x80):
+            self.warning("Stream doesn't look like master boot record (partition bootable error)!")
+        yield UInt8(self, "start_head", "Starting head number of the partition")
+        yield Bits(self, "start_sector", 6, "Starting sector number of the partition")
+        yield CylinderNumber(self, "start_cylinder", "Starting cylinder number of the partition")
+        yield Enum(UInt8(self, "system", "System indicator"), self.system_name)
+        yield UInt8(self, "end_head", "Ending head number of the partition")
+        yield Bits(self, "end_sector", 6, "Ending sector number of the partition")
+        yield CylinderNumber(self, "end_cylinder", "Ending cylinder number of the partition")
+        yield UInt32(self, "LBA", "LBA (number of sectors before this partition)")
+        yield UInt32(self, "size", "Size (block count)")
+
+    def isUsed(self):
+        return self["system"].value != 0
+
+    def createDescription(self):
+        desc = "Partition header: "
+        if self.isUsed():
+            system = self["system"].display
+            size = self["size"].value * BLOCK_SIZE
+            desc += "%s, %s" % (system, humanFilesize(size))
+        else:
+            desc += "(unused)"
+        return desc
+
+
+class MasterBootRecord(FieldSet):
+    static_size = 512*8
+
+    def createFields(self):
+        yield RawBytes(self, "program", 446, "Boot program (Intel x86 machine code)")
+        yield PartitionHeader(self, "header[0]")
+        yield PartitionHeader(self, "header[1]")
+        yield PartitionHeader(self, "header[2]")
+        yield PartitionHeader(self, "header[3]")
+        yield textHandler(UInt16(self, "signature", "Signature (0xAA55)"), hexadecimal)
+
+    def _getPartitions(self):
+        return ( self[index] for index in xrange(1,5) )
+    headers = property(_getPartitions)
+
+
+class Partition(FieldSet):
+    def createFields(self):
+        mbr = MasterBootRecord(self, "mbr")
+        yield mbr
+
+        # No error if we only want to analyse a backup of a mbr
+        if self.eof:
+            return
+
+        for start, index, header in sorted((hdr["LBA"].value, index, hdr)
+                for index, hdr in enumerate(mbr.headers) if hdr.isUsed()):
+            # Seek to the beginning of the partition
+            padding = self.seekByte(start * BLOCK_SIZE, "padding[]")
+            if padding:
+                yield padding
+
+            # Content of the partition
+            name = "partition[%u]" % index
+            size = BLOCK_SIZE * header["size"].value
+            desc = header["system"].display
+            if header["system"].value == 5:
+                yield Partition(self, name, desc, size * 8)
+            else:
+                yield RawBytes(self, name, size, desc)
+
+        # Padding at the end
+        if self.current_size < self._size:
+            yield self.seekBit(self._size, "end")
+
+
+class MSDos_HardDrive(Parser, Partition):
+    endian = LITTLE_ENDIAN
+    MAGIC = "\x55\xAA"
+    PARSER_TAGS = {
+        "id": "msdos_harddrive",
+        "category": "file_system",
+        "description": "MS-DOS hard drive with Master Boot Record (MBR)",
+        "min_size": 512*8,
+        "file_ext": ("",),
+#        "magic": ((MAGIC, 510*8),),
+    }
+
+    def validate(self):
+        if self.stream.readBytes(510*8, 2) != self.MAGIC:
+            return "Invalid signature"
+        used = False
+        for hdr in self["mbr"].headers:
+            if hdr["bootable"].value not in (0x00, 0x80):
+                return "Wrong boot flag"
+            used |= hdr.isUsed()
+        return used or "No partition found"
diff --git a/lib/hachoir_parser/file_system/ntfs.py b/lib/hachoir_parser/file_system/ntfs.py
new file mode 100644
index 0000000000000000000000000000000000000000..efea7e71347fc526cbc1f295c6f8582a4ee08eb9
--- /dev/null
+++ b/lib/hachoir_parser/file_system/ntfs.py
@@ -0,0 +1,285 @@
+"""
+New Technology File System (NTFS) file system parser.
+
+Sources:
+- The NTFS documentation
+  http://www.linux-ntfs.org/
+- NTFS-3G driver
+  http://www.ntfs-3g.org/
+
+Creation date: 3rd january 2007
+Author: Victor Stinner
+"""
+
+SECTOR_SIZE = 512
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, Enum,
+    UInt8, UInt16, UInt32, UInt64, TimestampWin64,
+    String, Bytes, Bit,
+    NullBits, NullBytes, PaddingBytes, RawBytes)
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal, filesizeHandler
+from hachoir_core.tools import humanFilesize, createDict
+from hachoir_parser.common.msdos import MSDOSFileAttr32
+
+class BiosParameterBlock(FieldSet):
+    """
+    BIOS parameter block (bpb) structure
+    """
+    static_size = 25 * 8
+    MEDIA_TYPE = {0xf8: "Hard disk"}
+
+    def createFields(self):
+        yield UInt16(self, "bytes_per_sector", "Size of a sector in bytes")
+        yield UInt8(self, "sectors_per_cluster", "Size of a cluster in sectors")
+        yield NullBytes(self, "reserved_sectors", 2)
+        yield NullBytes(self, "fats", 1)
+        yield NullBytes(self, "root_entries", 2)
+        yield NullBytes(self, "sectors", 2)
+        yield Enum(UInt8(self, "media_type"), self.MEDIA_TYPE)
+        yield NullBytes(self, "sectors_per_fat", 2)
+        yield UInt16(self, "sectors_per_track")
+        yield UInt16(self, "heads")
+        yield UInt32(self, "hidden_sectors")
+        yield NullBytes(self, "large_sectors", 4)
+
+    def validate(self):
+        if self["bytes_per_sector"].value not in (256, 512, 1024, 2048, 4096):
+            return "Invalid sector size (%u bytes)" % \
+                self["bytes_per_sector"].value
+        if self["sectors_per_cluster"].value not in (1, 2, 4, 8, 16, 32, 64, 128):
+            return "Invalid cluster size (%u sectors)" % \
+                self["sectors_per_cluster"].value
+        return ""
+
+class MasterBootRecord(FieldSet):
+    static_size = 512*8
+    def createFields(self):
+        yield Bytes(self, "jump", 3, "Intel x86 jump instruction")
+        yield String(self, "name", 8)
+        yield BiosParameterBlock(self, "bios", "BIOS parameters")
+
+        yield textHandler(UInt8(self, "physical_drive", "(0x80)"), hexadecimal)
+        yield NullBytes(self, "current_head", 1)
+        yield textHandler(UInt8(self, "ext_boot_sig", "Extended boot signature (0x80)"), hexadecimal)
+        yield NullBytes(self, "unused", 1)
+
+        yield UInt64(self, "nb_sectors")
+        yield UInt64(self, "mft_cluster", "Cluster location of MFT data")
+        yield UInt64(self, "mftmirr_cluster", "Cluster location of copy of MFT")
+        yield UInt8(self, "cluster_per_mft", "MFT record size in clusters")
+        yield NullBytes(self, "reserved[]", 3)
+        yield UInt8(self, "cluster_per_index", "Index block size in clusters")
+        yield NullBytes(self, "reserved[]", 3)
+        yield textHandler(UInt64(self, "serial_number"), hexadecimal)
+        yield textHandler(UInt32(self, "checksum", "Boot sector checksum"), hexadecimal)
+        yield Bytes(self, "boot_code", 426)
+        yield Bytes(self, "mbr_magic", 2, r"Master boot record magic number (\x55\xAA)")
+
+    def createDescription(self):
+        size = self["nb_sectors"].value * self["bios/bytes_per_sector"].value
+        return "NTFS Master Boot Record (%s)" % humanFilesize(size)
+
+class MFT_Flags(FieldSet):
+    static_size = 16
+    def createFields(self):
+        yield Bit(self, "in_use")
+        yield Bit(self, "is_directory")
+        yield NullBits(self, "padding", 14)
+
+class Attribute(FieldSet):
+    # --- Common code ---
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+        self._size = self["size"].value * 8
+        type = self["type"].value
+        if type in self.ATTR_INFO:
+            self._name = self.ATTR_INFO[type][0]
+            self._parser = self.ATTR_INFO[type][2]
+
+    def createFields(self):
+        yield Enum(textHandler(UInt32(self, "type"), hexadecimal), self.ATTR_NAME)
+        yield UInt32(self, "size")
+        yield UInt8(self, "non_resident", "Non-resident flag")
+        yield UInt8(self, "name_length", "Name length in bytes")
+        yield UInt16(self, "name_offset", "Name offset")
+        yield UInt16(self, "flags")
+        yield textHandler(UInt16(self, "attribute_id"), hexadecimal)
+        yield UInt32(self, "length_attr", "Length of the Attribute")
+        yield UInt16(self, "offset_attr", "Offset of the Attribute")
+        yield UInt8(self, "indexed_flag")
+        yield NullBytes(self, "padding", 1)
+        if self._parser:
+            for field in self._parser(self):
+                yield field
+        else:
+            size = self["length_attr"].value
+            if size:
+                yield RawBytes(self, "data", size)
+        size = (self.size - self.current_size) // 8
+        if size:
+            yield PaddingBytes(self, "end_padding", size)
+
+    def createDescription(self):
+        return "Attribute %s" % self["type"].display
+    FILENAME_NAMESPACE = {
+        0: "POSIX",
+        1: "Win32",
+        2: "DOS",
+        3: "Win32 & DOS",
+    }
+
+    # --- Parser specific to a type ---
+    def parseStandardInfo(self):
+        yield TimestampWin64(self, "ctime", "File Creation")
+        yield TimestampWin64(self, "atime", "File Altered")
+        yield TimestampWin64(self, "mtime", "MFT Changed")
+        yield TimestampWin64(self, "rtime", "File Read")
+        yield MSDOSFileAttr32(self, "file_attr", "DOS File Permissions")
+        yield UInt32(self, "max_version", "Maximum Number of Versions")
+        yield UInt32(self, "version", "Version Number")
+        yield UInt32(self, "class_id")
+        yield UInt32(self, "owner_id")
+        yield UInt32(self, "security_id")
+        yield filesizeHandler(UInt64(self, "quota_charged", "Quota Charged"))
+        yield UInt64(self, "usn", "Update Sequence Number (USN)")
+
+    def parseFilename(self):
+        yield UInt64(self, "ref", "File reference to the parent directory")
+        yield TimestampWin64(self, "ctime", "File Creation")
+        yield TimestampWin64(self, "atime", "File Altered")
+        yield TimestampWin64(self, "mtime", "MFT Changed")
+        yield TimestampWin64(self, "rtime", "File Read")
+        yield filesizeHandler(UInt64(self, "alloc_size", "Allocated size of the file"))
+        yield filesizeHandler(UInt64(self, "real_size", "Real size of the file"))
+        yield UInt32(self, "file_flags")
+        yield UInt32(self, "file_flags2", "Used by EAs and Reparse")
+        yield UInt8(self, "filename_length", "Filename length in characters")
+        yield Enum(UInt8(self, "filename_namespace"), self.FILENAME_NAMESPACE)
+        size = self["filename_length"].value * 2
+        if size:
+            yield String(self, "filename", size, charset="UTF-16-LE")
+
+    def parseData(self):
+        size = (self.size - self.current_size) // 8
+        if size:
+            yield Bytes(self, "data", size)
+
+    def parseBitmap(self):
+        size = (self.size - self.current_size)
+        for index in xrange(size):
+            yield Bit(self, "bit[]")
+
+    # --- Type information ---
+    ATTR_INFO = {
+         0x10: ('standard_info', 'STANDARD_INFORMATION ', parseStandardInfo),
+         0x20: ('attr_list', 'ATTRIBUTE_LIST ', None),
+         0x30: ('filename', 'FILE_NAME ', parseFilename),
+         0x40: ('vol_ver', 'VOLUME_VERSION', None),  # FIXME: duplicate key 0x40 -- silently shadowed by 'obj_id' below
+         0x40: ('obj_id', 'OBJECT_ID ', None),
+         0x50: ('security', 'SECURITY_DESCRIPTOR ', None),
+         0x60: ('vol_name', 'VOLUME_NAME ', None),
+         0x70: ('vol_info', 'VOLUME_INFORMATION ', None),
+         0x80: ('data', 'DATA ', parseData),
+         0x90: ('index_root', 'INDEX_ROOT ', None),
+         0xA0: ('index_alloc', 'INDEX_ALLOCATION ', None),
+         0xB0: ('bitmap', 'BITMAP ', parseBitmap),
+         0xC0: ('sym_link', 'SYMBOLIC_LINK', None),  # FIXME: duplicate key 0xC0 -- silently shadowed by 'reparse' below
+         0xC0: ('reparse', 'REPARSE_POINT ', None),
+         0xD0: ('ea_info', 'EA_INFORMATION ', None),
+         0xE0: ('ea', 'EA ', None),
+         0xF0: ('prop_set', 'PROPERTY_SET', None),
+        0x100: ('log_util', 'LOGGED_UTILITY_STREAM', None),
+    }
+    ATTR_NAME = createDict(ATTR_INFO, 1)
+
+class File(FieldSet):
+#    static_size = 48*8
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+        self._size = self["bytes_allocated"].value * 8
+
+    def createFields(self):
+        yield Bytes(self, "signature", 4, "Usually the magic is 'FILE'")
+        yield UInt16(self, "usa_ofs", "Update Sequence Array offset")
+        yield UInt16(self, "usa_count", "Update Sequence Array count")
+        yield UInt64(self, "lsn", "$LogFile sequence number for this record")
+        yield UInt16(self, "sequence_number", "Number of times this mft record has been reused")
+        yield UInt16(self, "link_count", "Number of hard links")
+        yield UInt16(self, "attrs_offset", "Byte offset to the first attribute")
+        yield MFT_Flags(self, "flags")
+        yield UInt32(self, "bytes_in_use", "Number of bytes used in this record")
+        yield UInt32(self, "bytes_allocated", "Number of bytes allocated for this record")
+        yield UInt64(self, "base_mft_record")
+        yield UInt16(self, "next_attr_instance")
+
+        # The below fields are specific to NTFS 3.1+ (Windows XP and above)
+        yield NullBytes(self, "reserved", 2)
+        yield UInt32(self, "mft_record_number", "Number of this mft record")
+
+        padding = self.seekByte(self["attrs_offset"].value, relative=True)
+        if padding:
+            yield padding
+
+        while not self.eof:
+            addr = self.absolute_address + self.current_size
+            if self.stream.readBytes(addr, 4) == "\xFF\xFF\xFF\xFF":
+                yield Bytes(self, "attr_end_marker", 8)
+                break
+            yield Attribute(self, "attr[]")
+
+        size = self["bytes_in_use"].value - self.current_size//8
+        if size:
+            yield RawBytes(self, "end_rawdata", size)
+
+        size = (self.size - self.current_size) // 8
+        if size:
+            yield RawBytes(self, "end_padding", size, "Unused but allocated bytes")
+
+    def createDescription(self):
+        text = "File"
+        if "filename/filename" in self:
+            text += ' "%s"' % self["filename/filename"].value
+        if "filename/real_size" in self:
+            text += ' (%s)' % self["filename/real_size"].display
+        if "standard_info/file_attr" in self:
+            text += ', %s' % self["standard_info/file_attr"].display
+        return text
+
+class NTFS(Parser):
+    MAGIC = "\xEB\x52\x90NTFS    "
+    PARSER_TAGS = {
+        "id": "ntfs",
+        "category": "file_system",
+        "description": "NTFS file system",
+        "min_size": 1024*8,
+        "magic": ((MAGIC, 0),),
+    }
+    endian = LITTLE_ENDIAN
+    _cluster_size = None
+
+    def validate(self):
+        if self.stream.readBytes(0, len(self.MAGIC)) != self.MAGIC:
+            return "Invalid magic string"
+        err = self["mbr/bios"].validate()
+        if err:
+            return err
+        return True
+
+    def createFields(self):
+        yield MasterBootRecord(self, "mbr")
+
+        bios = self["mbr/bios"]
+        cluster_size = bios["sectors_per_cluster"].value * bios["bytes_per_sector"].value
+        offset = self["mbr/mft_cluster"].value * cluster_size
+        padding = self.seekByte(offset, relative=False)
+        if padding:
+            yield padding
+        for index in xrange(1000):
+            yield File(self, "file[]")
+
+        size = (self.size - self.current_size) // 8
+        if size:
+            yield RawBytes(self, "end", size)
+
diff --git a/lib/hachoir_parser/file_system/reiser_fs.py b/lib/hachoir_parser/file_system/reiser_fs.py
new file mode 100644
index 0000000000000000000000000000000000000000..e71eb9509358715f2fe1b4b0e09eb2a969fc6717
--- /dev/null
+++ b/lib/hachoir_parser/file_system/reiser_fs.py
@@ -0,0 +1,120 @@
+"""
+ReiserFS file system version 3 parser (version 1, 2 and 4 are not supported).
+
+Author: Frederic Weisbecker
+Creation date: 8 december 2006
+
+Sources:
+ - http://p-nand-q.com/download/rfstool/reiserfs_docs.html
+ - http://homes.cerias.purdue.edu/~florian/reiser/reiserfs.php
+ - file://usr/src/linux-2.6.16.19/include/linux/reiserfs_fs.h
+
+NOTES:
+
Most of the description of the structures, their fields and the
comments given here comes from the file include/linux/reiserfs_fs.h
- written by Hans Reiser - in the Linux kernel 2.6.16.19, and from
the ReiserFS explanations in
http://p-nand-q.com/download/rfstool/reiserfs_docs.html written by Gerson
Kurz.
+"""
+
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, Enum,
+    UInt16, UInt32, String, RawBytes, NullBytes)
+from hachoir_core.endian import LITTLE_ENDIAN
+
class Journal_params(FieldSet):
    """ReiserFS journal parameter block: eight 32-bit fields, 32 bytes."""
    static_size = 32*8

    # (name, description) pairs in on-disk order; every field is a UInt32.
    _FIELDS = (
        ("1st_block", "Journal 1st block number"),
        ("dev", "Journal device number"),
        ("size", "Size of the journal"),
        ("trans_max", "Max number of blocks in a transaction"),
        #TODO: Must be explained: it was sb_journal_block_count
        ("magic", "Random value made on fs creation."),
        ("max_batch", "Max number of blocks to batch into a trans"),
        ("max_commit_age", "In seconds, how old can an async commit be"),
        ("max_trans_age", "In seconds, how old can a transaction be"),
    )

    def createFields(self):
        for name, description in self._FIELDS:
            yield UInt32(self, name, description)

    def createDescription(self):
        return "Parameters of the journal"
+
class SuperBlock(FieldSet):
    """ReiserFS v3 on-disk superblock (fixed 204-byte layout)."""
    static_size = 204*8

    # Legal values of the umount_state field.
    UMOUNT_STATE = { 1: "unmounted", 2: "not unmounted" }
    # Legal values of the hash_function field (directory-entry sort order).
    HASH_FUNCTIONS = {
        0: "UNSET_HASH",
        1: "TEA_HASH",
        2: "YURA_HASH",
        3: "R5_HASH"
    }

    def createFields(self):
        """Yield the superblock fields in exact on-disk order."""
        #TODO: This structure is normally divided in two parts:
        # _reiserfs_super_block_v1
        # _reiserfs_super_block
        # It will be divided later to easily support older version of the first part
        yield UInt32(self, "block_count", "Number of blocks")
        yield UInt32(self, "free_blocks", "Number of free blocks")
        yield UInt32(self, "root_block", "Root block number")
        # NOTE(review): this field *name* contains a space, unlike every other
        # field name in the file -- confirm it is intentional.
        yield Journal_params(self, "Journal parameters")
        yield UInt16(self, "blocksize", "Size of a block")
        yield UInt16(self, "oid_maxsize", "Max size of object id array")
        yield UInt16(self, "oid_cursize", "Current size of object id array")
        yield Enum(UInt16(self, "umount_state", "Filesystem umounted or not"), self.UMOUNT_STATE)
        yield String(self, "magic", 10, "Magic string", strip="\0")
        #TODO: change the type of s_fs_state in Enum to have more details about this fsck state
        yield UInt16(self, "fs_state", "Rebuilding phase of fsck ")
        yield Enum(UInt32(self, "hash_function", "Hash function to sort names in a directory"), self.HASH_FUNCTIONS)
        yield UInt16(self, "tree_height", "Height of disk tree")
        yield UInt16(self, "bmap_nr", "Amount of bitmap blocks needed to address each block of file system")
        #TODO: find a good description for this field
        yield UInt16(self, "version", "Field only reliable on filesystem with non-standard journal")
        yield UInt16(self, "reserved_for_journal", "Size in blocks of journal area on main device")
        #TODO: same as above
        yield UInt32(self, "inode_generation", "No description")
        #TODO: same as above and should be an enum field
        yield UInt32(self, "flags", "No description")
        #TODO: Create a special Type to format this id
        yield RawBytes(self, "uuid", 16, "Filesystem unique identifier")
        yield String(self, "label", 16, "Filesystem volume label", strip="\0")
        # Pad the structure out to its fixed 204-byte size.
        yield NullBytes(self, "unused", 88)

    def createDescription(self):
        return "Superblock: ReiserFs Filesystem"
+
class REISER_FS(Parser):
    """ReiserFS v3 parser: 64 KiB boot padding followed by the superblock."""
    PARSER_TAGS = {
        "id": "reiserfs",
        "category": "file_system",
        # 130 blocks before the journal +
        # Minimal size of journal (513 blocks) +
        # 1 block for the rest
        # And The Minimal size of a block is 512 bytes
        "min_size": (130+513+1) * (512*8),
        "description": "ReiserFS file system"
    }
    endian = LITTLE_ENDIAN

    # Offsets (in bytes) of important information
    SUPERBLOCK_OFFSET = 64*1024
    MAGIC_OFFSET = SUPERBLOCK_OFFSET + 52

    def validate(self):
        """Inspect the superblock magic: only version 3 is supported."""
        magic = self.stream.readBytes(self.MAGIC_OFFSET*8, 9).rstrip("\0")
        if magic == "ReIsEr3Fs":
            return True
        if magic == "ReIsEr2Fs" or magic == "ReIsErFs":
            return "Unsupported version of ReiserFs"
        return "Invalid magic string"

    def createFields(self):
        yield NullBytes(self, "padding[]", self.SUPERBLOCK_OFFSET)
        yield SuperBlock(self, "superblock")
+
diff --git a/lib/hachoir_parser/game/__init__.py b/lib/hachoir_parser/game/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1b6447b981c9c4787e493053cdd0a2a2f662f937
--- /dev/null
+++ b/lib/hachoir_parser/game/__init__.py
@@ -0,0 +1,4 @@
+from hachoir_parser.game.zsnes import ZSNESFile
+from hachoir_parser.game.spider_man_video import SpiderManVideoFile
+from hachoir_parser.game.laf import LafFile
+from hachoir_parser.game.blp import BLP1File, BLP2File
\ No newline at end of file
diff --git a/lib/hachoir_parser/game/blp.py b/lib/hachoir_parser/game/blp.py
new file mode 100644
index 0000000000000000000000000000000000000000..218e864014729ca35cf5406530f73124ad78d610
--- /dev/null
+++ b/lib/hachoir_parser/game/blp.py
@@ -0,0 +1,269 @@
+"""
+Blizzard BLP Image File Parser
+
+Author: Robert Xiao
+Creation date: July 10 2007
+
+- BLP1 File Format
+  http://magos.thejefffiles.com/War3ModelEditor/MagosBlpFormat.txt
+- BLP2 File Format (Wikipedia)
+  http://en.wikipedia.org/wiki/.BLP
+- S3TC (DXT1, 3, 5) Formats
+  http://en.wikipedia.org/wiki/S3_Texture_Compression
+"""
+
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.field import String, UInt32, UInt8, Enum, FieldSet, RawBytes, GenericVector, Bit, Bits
+from hachoir_parser.parser import Parser
+from hachoir_parser.image.common import PaletteRGBA
+from hachoir_core.tools import alignValue
+
class PaletteIndex(UInt8):
    """One byte indexing into the file's global /palette field."""
    def createDescription(self):
        color = self["/palette/color[%i]" % self.value]
        return "Palette index %i (%s)" % (self.value, color.description)
+
class Generic2DArray(FieldSet):
    """A 2D grid of fields: `height` rows, each a vector of `width` items."""
    def __init__(self, parent, name, width, height, item_class, row_name="row", item_name="item", *args, **kwargs):
        FieldSet.__init__(self, parent, name, *args, **kwargs)
        self.width = width
        self.height = height
        self.item_class = item_class
        self.row_name = row_name
        self.item_name = item_name

    def createFields(self):
        row_label = self.row_name + "[]"
        for _ in xrange(self.height):
            yield GenericVector(self, row_label, self.width, self.item_class, self.item_name)
+
class BLP1File(Parser):
    """Parser for Blizzard BLP version 1 images (Warcraft III textures)."""
    MAGIC = "BLP1"
    PARSER_TAGS = {
        "id": "blp1",
        "category": "game",
        "file_ext": ("blp",),
        "mime": (u"application/x-blp",), # TODO: real mime type???
        "magic": ((MAGIC, 0),),
        "min_size": 7*32,   # 7 DWORDs start, incl. magic
        "description": "Blizzard Image Format, version 1",
    }
    endian = LITTLE_ENDIAN

    def validate(self):
        """Accept the stream only if it starts with the BLP1 signature."""
        # Reuse MAGIC instead of duplicating the "BLP1" literal.
        if self.stream.readBytes(0, len(self.MAGIC)) != self.MAGIC:
            return "Invalid magic"
        return True

    def createFields(self):
        """Header, then shared JPEG header or palette, then up to 16 mipmaps."""
        yield String(self, "magic", 4, "Signature (BLP1)")
        yield Enum(UInt32(self, "compression"), {
            0:"JPEG Compression",
            1:"Uncompressed"})
        yield UInt32(self, "flags")
        yield UInt32(self, "width")
        yield UInt32(self, "height")
        yield Enum(UInt32(self, "type"), {
            3:"Uncompressed Index List + Alpha List",
            4:"Uncompressed Index List + Alpha List",
            5:"Uncompressed Index List"})
        yield UInt32(self, "subtype")
        for i in xrange(16):
            yield UInt32(self, "mipmap_offset[]")
        for i in xrange(16):
            yield UInt32(self, "mipmap_size[]")

        compression = self["compression"].value
        image_type = self["type"].value
        width = self["width"].value
        height = self["height"].value

        if compression == 0: # JPEG Compression
            yield UInt32(self, "jpeg_header_len")
            yield RawBytes(self, "jpeg_header", self["jpeg_header_len"].value, "Shared JPEG Header")
        else:
            yield PaletteRGBA(self, "palette", 256)

        offsets = self.array("mipmap_offset")
        sizes = self.array("mipmap_size")
        for i in xrange(16):
            # A zero offset or size means "mipmap level not present".
            if not offsets[i].value or not sizes[i].value:
                continue
            padding = self.seekByte(offsets[i].value)
            if padding:
                yield padding
            if compression == 0:
                yield RawBytes(self, "mipmap[%i]" % i, sizes[i].value, "JPEG data, append to header to recover complete image")
            elif compression == 1:
                yield Generic2DArray(self, "mipmap_indexes[%i]" % i, width, height, PaletteIndex, "row", "index", "Indexes into the palette")
                if image_type in (3, 4):
                    yield Generic2DArray(self, "mipmap_alphas[%i]" % i, width, height, UInt8, "row", "alpha", "Alpha values")
            # Each mipmap level halves both dimensions. Explicit floor
            # division keeps integer sizes on both Python 2 and Python 3
            # (the original "/=" is true division on Python 3).
            width //= 2
            height //= 2
+
def interp_avg(data_low, data_high, n):
    """Yield n-1 evenly spaced integer averages between two endpoints.

    Endpoints may be integers, or sequences of integers (interpolated
    element-wise, yielding lists). Used for DXT color/alpha ramps.

    >>> list(interp_avg(1, 10, 3))
    [4, 7]
    >>> list(interp_avg([0, 0, 0], [90, 60, 30], 3))
    [[30, 20, 10], [60, 40, 20]]
    """
    # Scalar vs. sequence: integers have no __iter__. The original tested
    # isinstance(..., (int, long)), which breaks on Python 3 (`long`).
    if not hasattr(data_low, '__iter__'):
        for i in range(1, n):
            # Floor division preserves the Python 2 integer semantics the
            # DXT decoders rely on ("/" would produce floats on Python 3).
            yield (data_low * (n - i) + data_high * i) // n
    else:
        # Interpolate each component pair in lockstep.
        pair_iters = [interp_avg(low, high, n)
                      for low, high in zip(data_low, data_high)]
        for i in range(1, n):
            # next() instead of .next(): works on Python 2.6+ and 3.x, and
            # avoids shadowing the builtin iter() as the original loop did.
            yield [next(it) for it in pair_iters]
+
def color_name(data, bits):
    """Format the first three components of *data* as an #RRGGBB string.

    *bits* gives the bit width of each component; each value is shifted
    up to a full 8 bits before formatting.
    """
    components = ["%02X" % (data[i] << (8 - bits[i])) for i in range(3)]
    return "#" + "".join(components)
+
class DXT1(FieldSet):
    """One DXT1 (S3TC) block: two RGB565 endpoint colors plus 16 2-bit
    selectors covering a 4x4 pixel tile."""
    static_size = 64
    def __init__(self, parent, name, dxt2_mode=False, *args, **kwargs):
        """With dxt2_mode on, this block always uses the four-color model
        (no 1-bit transparency), as required inside DXT3/DXT5 blocks."""
        FieldSet.__init__(self, parent, name, *args, **kwargs)
        self.dxt2_mode = dxt2_mode
    def createFields(self):
        values = [[], []]  # the two endpoint colors as [red, green, blue]
        for i in (0, 1):
            # Little-endian bit order: blue occupies the low bits of the
            # 16-bit color word, so it is read first.
            yield Bits(self, "blue[]", 5)
            yield Bits(self, "green[]", 6)
            yield Bits(self, "red[]", 5)
            values[i] = [self["red[%i]" % i].value,
                         self["green[%i]" % i].value,
                         self["blue[%i]" % i].value]
        # Lexicographic [r, g, b] comparison matches comparing the packed
        # 16-bit color values (red is the most significant field).
        if values[0] > values[1] or self.dxt2_mode:
            # Four-color model: two interpolated colors at 1/3 and 2/3.
            values += interp_avg(values[0], values[1], 3)
        else:
            # Three-color model: one midpoint color plus "transparent".
            values += interp_avg(values[0], values[1], 2)
            values.append(None) # transparent
        for i in xrange(16):
            # 2-bit selector per pixel, named by (row, column).
            pixel = Bits(self, "pixel[%i][%i]" % divmod(i, 4), 2)
            color = values[pixel.value]
            if color is None:
                pixel._description = "Transparent"
            else:
                pixel._description = "RGB color: %s" % color_name(color, [5, 6, 5])
            yield pixel
+
class DXT3Alpha(FieldSet):
    """DXT3 alpha block: 16 explicit 4-bit alpha values for a 4x4 tile."""
    static_size = 64
    def createFields(self):
        for index in xrange(16):
            row, col = divmod(index, 4)
            yield Bits(self, "alpha[%i][%i]" % (row, col), 4)
+
class DXT3(FieldSet):
    """One DXT3 block: explicit alpha block, then a four-color DXT1 block."""
    static_size = 128
    def createFields(self):
        alpha_block = DXT3Alpha(self, "alpha", "Alpha Channel Data")
        yield alpha_block
        # dxt2_mode=True: the color block always uses the four-color model.
        yield DXT1(self, "color", True, "Color Channel Data")
+
class DXT5Alpha(FieldSet):
    """DXT5 alpha block: two endpoint bytes followed by 16 3-bit selectors
    indexing an 8-entry interpolated alpha table."""
    static_size = 64
    def createFields(self):
        yield UInt8(self, "alpha_val[0]", "First alpha value")
        yield UInt8(self, "alpha_val[1]", "Second alpha value")
        lookup = [self["alpha_val[0]"].value, self["alpha_val[1]"].value]
        if lookup[0] > lookup[1]:
            # Eight-alpha model: six interpolated values.
            lookup += interp_avg(lookup[0], lookup[1], 7)
        else:
            # Six-alpha model: four interpolated values plus 0 and 255.
            lookup += interp_avg(lookup[0], lookup[1], 5)
            lookup += [0, 255]
        for index in xrange(16):
            selector = Bits(self, "alpha[%i][%i]" % divmod(index, 4), 3)
            selector._description = "Alpha value: %i" % lookup[selector.value]
            yield selector
+
class DXT5(FieldSet):
    """One DXT5 block: interpolated alpha block, then a four-color DXT1 block."""
    static_size = 128
    def createFields(self):
        alpha_block = DXT5Alpha(self, "alpha", "Alpha Channel Data")
        yield alpha_block
        # dxt2_mode=True: the color block always uses the four-color model.
        yield DXT1(self, "color", True, "Color Channel Data")
+
class BLP2File(Parser):
    """Parser for Blizzard BLP version 2 images (World of Warcraft textures)."""
    MAGIC = "BLP2"
    PARSER_TAGS = {
        "id": "blp2",
        "category": "game",
        "file_ext": ("blp",),
        "mime": (u"application/x-blp",),
        "magic": ((MAGIC, 0),),
        "min_size": 5*32,   # 5 DWORDs start, incl. magic
        "description": "Blizzard Image Format, version 2",
    }
    endian = LITTLE_ENDIAN

    def validate(self):
        """Accept the stream only if it starts with the BLP2 signature."""
        # Reuse MAGIC instead of duplicating the "BLP2" literal.
        if self.stream.readBytes(0, len(self.MAGIC)) != self.MAGIC:
            return "Invalid magic"
        return True

    def createFields(self):
        """Header, palette, then up to 16 raw or DXT-compressed mipmaps."""
        yield String(self, "magic", 4, "Signature (BLP2)")
        yield Enum(UInt32(self, "compression", "Compression type"), {
            0:"JPEG Compressed",
            1:"Uncompressed or DXT/S3TC compressed"})
        yield Enum(UInt8(self, "encoding", "Encoding type"), {
            1:"Raw",
            2:"DXT/S3TC Texture Compression (a.k.a. DirectX)"})
        yield UInt8(self, "alpha_depth", "Alpha channel depth, in bits (0 = no alpha)")
        yield Enum(UInt8(self, "alpha_encoding", "Encoding used for alpha channel"), {
            0:"DXT1 alpha (0 or 1 bit alpha)",
            1:"DXT3 alpha (4 bit alpha)",
            7:"DXT5 alpha (8 bit interpolated alpha)"})
        yield Enum(UInt8(self, "has_mips", "Are mip levels present?"), {
            0:"No mip levels",
            1:"Mip levels present; number of levels determined by image size"})
        yield UInt32(self, "width", "Base image width")
        yield UInt32(self, "height", "Base image height")
        for i in xrange(16):
            yield UInt32(self, "mipmap_offset[]")
        for i in xrange(16):
            yield UInt32(self, "mipmap_size[]")
        yield PaletteRGBA(self, "palette", 256)

        compression = self["compression"].value
        encoding = self["encoding"].value
        alpha_depth = self["alpha_depth"].value
        alpha_encoding = self["alpha_encoding"].value
        width = self["width"].value
        height = self["height"].value

        if compression == 0: # JPEG Compression
            yield UInt32(self, "jpeg_header_len")
            yield RawBytes(self, "jpeg_header", self["jpeg_header_len"].value, "Shared JPEG Header")

        offsets = self.array("mipmap_offset")
        sizes = self.array("mipmap_size")
        for i in xrange(16):
            # A zero offset or size means "mipmap level not present".
            if not offsets[i].value or not sizes[i].value:
                continue
            padding = self.seekByte(offsets[i].value)
            if padding:
                yield padding
            if compression == 0:
                yield RawBytes(self, "mipmap[%i]" % i, sizes[i].value, "JPEG data, append to header to recover complete image")
            elif compression == 1 and encoding == 1:
                # NOTE(review): the (height, width) argument order here is
                # swapped relative to BLP1File's call -- kept as-is; confirm
                # against real BLP2 samples before changing the layout.
                yield Generic2DArray(self, "mipmap_indexes[%i]" % i, height, width, PaletteIndex, "row", "index", "Indexes into the palette")
                # BUG FIX: the alpha arrays were built with GenericVector,
                # whose signature is (parent, name, nb_items, item_class,
                # item_name, ...); passing (height, width, Bit, ...) put the
                # int `width` where the item class goes and crashed on any
                # paletted BLP2 with an alpha channel. Use Generic2DArray as
                # for the indexes above.
                if alpha_depth == 1:
                    yield Generic2DArray(self, "mipmap_alphas[%i]" % i, height, width, Bit, "row", "is_opaque", "Alpha values")
                elif alpha_depth == 8:
                    yield Generic2DArray(self, "mipmap_alphas[%i]" % i, height, width, UInt8, "row", "alpha", "Alpha values")
            elif compression == 1 and encoding == 2:
                # DXT blocks cover 4x4 pixel tiles; round dimensions up.
                block_height = alignValue(height, 4) // 4
                block_width = alignValue(width, 4) // 4
                if alpha_depth in [0, 1] and alpha_encoding == 0:
                    yield Generic2DArray(self, "mipmap[%i]" % i, block_height, block_width, DXT1, "row", "block", "DXT1-compressed image blocks")
                elif alpha_depth == 8 and alpha_encoding == 1:
                    yield Generic2DArray(self, "mipmap[%i]" % i, block_height, block_width, DXT3, "row", "block", "DXT3-compressed image blocks")
                elif alpha_depth == 8 and alpha_encoding == 7:
                    yield Generic2DArray(self, "mipmap[%i]" % i, block_height, block_width, DXT5, "row", "block", "DXT5-compressed image blocks")
            # Each mipmap level halves both dimensions. Explicit floor
            # division keeps integer sizes on both Python 2 and Python 3.
            width //= 2
            height //= 2
diff --git a/lib/hachoir_parser/game/laf.py b/lib/hachoir_parser/game/laf.py
new file mode 100644
index 0000000000000000000000000000000000000000..4a8e15cfb88da9d7a7857230075917867816b99a
--- /dev/null
+++ b/lib/hachoir_parser/game/laf.py
@@ -0,0 +1,94 @@
+# -*- coding: utf-8 -*-
+
+"""
+LucasArts Font parser.
+
+Author: Cyril Zorin
+Creation date: 1 January 2007
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+        UInt8, UInt16, UInt32, GenericVector)
+from hachoir_core.endian import LITTLE_ENDIAN
+
class CharData(FieldSet):
  """Bitmap data for every glyph, in the order given by *chars*."""
  def __init__(self, chars, *args):
    FieldSet.__init__(self, *args)
    # Sequence of CharInfo field sets describing each glyph.
    self.chars = chars

  def createFields(self):
    for char_info in self.chars:
      yield CharBitmap(char_info, self, "char_bitmap[]")
+
class CharBitmap(FieldSet):
  """One glyph bitmap: height_pixels rows of width_pixels 8-bit pixels."""
  def __init__(self, char, *args):
    FieldSet.__init__(self, *args)
    # CharInfo field set providing this glyph's pixel dimensions.
    self.char = char

  def createFields(self):
    height = self.char["height_pixels"].value
    width = self.char["width_pixels"].value
    for _ in xrange(height):
      yield GenericVector(self, "line[]", width, UInt8, "pixel")
+
class CharInfo(FieldSet):
  """Fixed 16-byte glyph descriptor from the font's character table."""
  static_size = 16 * 8  # 4 + 1 + 3*1 + 4 + 4 bytes

  def createFields(self):
    # Presumably the offset of this glyph's bitmap inside char_data
    # (glyph #0 has offset 0 -- see LafFile.validate); confirm.
    yield UInt32(self, "data_offset")
    yield UInt8(self, "logical_width")
    yield UInt8(self, "unknown[]")
    yield UInt8(self, "unknown[]")
    yield UInt8(self, "unknown[]")
    yield UInt32(self, "width_pixels")
    yield UInt32(self, "height_pixels")
+
class LafFile(Parser):
  """LucasArts .laf bitmap font: header, code table, glyph table, bitmaps."""
  PARSER_TAGS = {
    "id": "lucasarts_font",
    "category": "game",
    "file_ext" : ("laf",),
    "min_size" : 32*8,
    "description" : "LucasArts Font"
    }

  endian = LITTLE_ENDIAN

  def validate(self):
    """Sanity-check the header: a font always covers char codes 0..255."""
    if self["num_chars"].value != 256:
        return "Invalid number of characters (%u)" % self["num_chars"].value
    # BUG FIX: the two messages below read "Invalid of code of first/last
    # character code" -- garbled grammar in the original.
    if self["first_char_code"].value != 0:
        return "Invalid code of first character (%u)" % self["first_char_code"].value
    if self["last_char_code"].value != 255:
        return "Invalid code of last character (%u)" % self["last_char_code"].value
    if self["char_codes/char[0]"].value != 0:
        return "Invalid character code #0 (%u)" % self["char_codes/char[0]"].value
    if self["chars/char[0]/data_offset"].value != 0:
        return "Invalid character #0 offset"
    return True

  def createFields(self):
    yield UInt32(self, "num_chars")
    yield UInt32(self, "raw_font_data_size")
    yield UInt32(self, "max_char_width")
    yield UInt32(self, "min_char_width")
    # NOTE(review): UInt32's third argument is the description; the literal
    # 4 below lands there (description == 4). Looks like a mistake, but it
    # is kept as-is to preserve behavior -- TODO confirm intent.
    yield UInt32(self, "unknown[]", 4)
    yield UInt32(self, "unknown[]", 4)
    yield UInt32(self, "first_char_code")
    yield UInt32(self, "last_char_code")

    # 256 16-bit character codes.
    yield GenericVector(self, "char_codes", self["num_chars"].value,
            UInt16, "char")

    # 256 fixed-size glyph descriptors (offset + metrics).
    yield GenericVector(self, "chars", self["num_chars"].value,
            CharInfo, "char")

    # character data. we make an effort to provide
    # something more meaningful than "RawBytes:
    # character bitmap data"
    yield CharData(self["chars"], self, "char_data")

    # read to the end
    if self.current_size < self._size:
      yield self.seekBit(self._size, "unknown[]")
diff --git a/lib/hachoir_parser/game/spider_man_video.py b/lib/hachoir_parser/game/spider_man_video.py
new file mode 100644
index 0000000000000000000000000000000000000000..b9092f33de3d922198d2f639701b41020d4a83d5
--- /dev/null
+++ b/lib/hachoir_parser/game/spider_man_video.py
@@ -0,0 +1,65 @@
+"""
+Parser for an obscure FMV file format: bin files from the game
+"The Amazing Spider-Man vs. The Kingpin" (Sega CD)
+
+Author: Mike Melanson
+Creation date: 2006-09-30
+File samples: http://samples.mplayerhq.hu/game-formats/spiderman-segacd-bin/
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import FieldSet, UInt32, String, RawBytes
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal
+
class Chunk(FieldSet):
    """One FourCC+length chunk; tag_info maps known FourCCs to
    (field name, payload parser, description)."""
    tag_info = {
        "CONF" : ("conf[]", None, "Configuration header"),
        "AUDI" : ("audio[]", None, "Audio chunk"),
        "SYNC" : ("sync[]", None, "Start of video frame data"),
        "IVRA" : ("ivra[]", None, "Vector codebook (?)"),
        "VRAM" : ("video[]", None, "Video RAM tile pattern"),
        "CRAM" : ("color[]", None, "Color RAM (palette)"),
        "CEND" : ("video_end[]", None, "End of video data"),
        "MEND" : ("end_file", None, "End of file"),
    }

    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        # The length field covers the whole chunk, FourCC included.
        self._size = self["length"].value * 8
        info = self.tag_info.get(self["fourcc"].value)
        if info is not None:
            self._name, self._parser, self._description = info
        else:
            self._parser = None
            self._description = "Unknown chunk: fourcc %s" % self["fourcc"].display

    def createFields(self):
        yield String(self, "fourcc", 4, "FourCC", charset="ASCII")
        yield textHandler(UInt32(self, "length", "length"), hexadecimal)
        payload = self["length"].value - 8
        if payload <= 0:
            return
        if self._parser:
            for field in self._parser(self, payload):
                yield field
        else:
            yield RawBytes(self, "data", payload)
+
class SpiderManVideoFile(Parser):
    """Top-level parser: the file is a flat sequence of chunks."""
    PARSER_TAGS = {
        "id": "spiderman_video",
        "category": "game",
        "file_ext": ("bin",),
        "min_size": 8*8,
        "description": "The Amazing Spider-Man vs. The Kingpin (Sega CD) FMV video"
    }

    endian = BIG_ENDIAN

    def validate(self):
        """A valid file starts with a 'CONF' configuration chunk."""
        magic = self.stream.readBytes(0, 4)
        return magic == 'CONF'

    def createFields(self):
        while not self.eof:
            yield Chunk(self, "chunk[]")
+
diff --git a/lib/hachoir_parser/game/zsnes.py b/lib/hachoir_parser/game/zsnes.py
new file mode 100644
index 0000000000000000000000000000000000000000..a8f7550614f12b584ec1418284355e7e3b5bf7c7
--- /dev/null
+++ b/lib/hachoir_parser/game/zsnes.py
@@ -0,0 +1,250 @@
+"""
+ZSNES Save State Parser (v143 only currently)
+
+Author: Jason Gorski
+Creation date: 2006-09-15
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, StaticFieldSet,
+    UInt8, UInt16, UInt32,
+    String, PaddingBytes, Bytes, RawBytes)
+from hachoir_core.endian import LITTLE_ENDIAN
+
class ZSTHeader(StaticFieldSet):
    """Fixed-layout header of a ZSNES v143 save state (.zst)."""
    format = (
        # 26-byte ASCII identification string, then its length and version.
        (String, "zs_mesg", 26, "File header", {"charset": "ASCII"}),
        (UInt8, "zs_mesglen", "File header string len"),
        (UInt8, "zs_version", "Version minor #"),
        # Emulation timing state.
        (UInt8, "curcyc", "cycles left in scanline"),
        (UInt16, "curypos", "current y position"),
        (UInt8, "cacheud", "update cache every ? frames"),
        (UInt8, "ccud", "current cache increment"),
        (UInt8, "intrset", "interrupt set"),
        (UInt8, "cycpl", "cycles per scanline"),
        (UInt8, "cycphb", "cycles per hblank"),
        (UInt8, "spcon", "SPC Enable (1=enabled)"),
        # Stack pointer masking values.
        (UInt16, "stackand", "value to and stack to keep it from going to the wrong area"),
        (UInt16, "stackor", "value to or stack to keep it from going to the wrong area"),
    )
+
class ZSTcpu(StaticFieldSet):
    """65816 CPU register state as stored in a ZSNES v143 save state."""
    format = (
        (UInt16, "xat"),
        (UInt8, "xdbt"),
        (UInt8, "xpbt"),
        (UInt16, "xst"),
        (UInt16, "xdt"),
        (UInt16, "xxt"),
        (UInt16, "xyt"),
        (UInt8, "xp"),
        (UInt8, "xe"),
        (UInt16, "xpc"),
        (UInt8, "xirqb", "which bank the irqs start at"),
        (UInt8, "debugger", "Start with debugger (1: yes, 0: no)"),
        # BUG FIX: a missing comma ("Curtable" "Current table address")
        # concatenated the name and description into one string, producing a
        # field named "CurtableCurrent table address" with no description.
        (UInt32, "Curtable", "Current table address"),
        (UInt8, "curnmi", "if in NMI (1=yes)"),
        (UInt32, "cycpbl", "percentage left of CPU/SPC to run (3.58 = 175)"),
        (UInt32, "cycpblt", "percentage of CPU/SPC to run"),
    )
+
+class ZSTppu(FieldSet):
+    static_size = 3019*8
+    def createFields(self):
+        yield UInt8(self, "sndrot", "rotates to use A,X or Y for sound skip")
+        yield UInt8(self, "sndrot2", "rotates a random value for sound skip")
+        yield UInt8(self, "INTEnab", "enables NMI(7)/VIRQ(5)/HIRQ(4)/JOY(0)")
+        yield UInt8(self, "NMIEnab", "controlled in e65816 loop. Sets to 81h")
+        yield UInt16(self, "VIRQLoc", "VIRQ Y location")
+        yield UInt8(self, "vidbright", "screen brightness 0..15")
+        yield UInt8(self, "previdbr", "previous screen brightness")
+        yield UInt8(self, "forceblnk", "force blanking on/off ($80=on)")
+        yield UInt32(self, "objptr", "pointer to object data in VRAM")
+        yield UInt32(self, "objptrn", "pointer2 to object data in VRAM")
+        yield UInt8(self, "objsize1", "1=8dot, 4=16dot, 16=32dot, 64=64dot")
+        yield UInt8(self, "objsize2", "large object size")
+        yield UInt8(self, "objmovs1", "number of bytes to move/paragraph")
+        yield UInt16(self, "objadds1", "number of bytes to add/paragraph")
+        yield UInt8(self, "objmovs2", "number of bytes to move/paragraph")
+        yield UInt16(self, "objadds2", "number of bytes to add/paragraph")
+        yield UInt16(self, "oamaddrt", "oam address")
+        yield UInt16(self, "oamaddrs", "oam address at beginning of vblank")
+        yield UInt8(self, "objhipr", "highest priority object #")
+        yield UInt8(self, "bgmode", "graphics mode 0..7")
+        yield UInt8(self, "bg3highst", "is 1 if background 3 has the highest priority")
+        yield UInt8(self, "bgtilesz", "0=8x8, 1=16x16 bit0=bg1, bit1=bg2, etc.")
+        yield UInt8(self, "mosaicon", "mosaic on, bit 0=bg1, bit1=bg2, etc.")
+        yield UInt8(self, "mosaicsz", "mosaic size in pixels")
+        yield UInt16(self, "bg1ptr", "pointer to background1")
+        yield UInt16(self, "bg2ptr", "pointer to background2")
+        yield UInt16(self, "bg3ptr", "pointer to background3")
+        yield UInt16(self, "bg4ptr", "pointer to background4")
+        yield UInt16(self, "bg1ptrb", "pointer to background1")
+        yield UInt16(self, "bg2ptrb", "pointer to background2")
+        yield UInt16(self, "bg3ptrb", "pointer to background3")
+        yield UInt16(self, "bg4ptrb", "pointer to background4")
+        yield UInt16(self, "bg1ptrc", "pointer to background1")
+        yield UInt16(self, "bg2ptrc", "pointer to background2")
+        yield UInt16(self, "bg3ptrc", "pointer to background3")
+        yield UInt16(self, "bg4ptrc", "pointer to background4")
+        yield UInt16(self, "bg1ptrd", "pointer to background1")
+        yield UInt16(self, "bg2ptrd", "pointer to background2")
+        yield UInt16(self, "bg3ptrd", "pointer to background3")
+        yield UInt16(self, "bg4ptrd", "pointer to background4")
+        yield UInt8(self, "bg1scsize", "bg #1 screen size (0=1x1,1=1x2,2=2x1,3=2x2)")
+        yield UInt8(self, "bg2scsize", "bg #2 screen size (0=1x1,1=1x2,2=2x1,3=2x2)")
+        yield UInt8(self, "bg3scsize", "bg #3 screen size (0=1x1,1=1x2,2=2x1,3=2x2)")
+        yield UInt8(self, "bg4scsize", "bg #4 screen size (0=1x1,1=1x2,2=2x1,3=2x2)")
+        yield UInt16(self, "bg1objptr", "pointer to tiles in background1")
+        yield UInt16(self, "bg2objptr", "pointer to tiles in background2")
+        yield UInt16(self, "bg3objptr", "pointer to tiles in background3")
+        yield UInt16(self, "bg4objptr", "pointer to tiles in background4")
+        yield UInt16(self, "bg1scrolx", "background 1 x position")
+        yield UInt16(self, "bg2scrolx", "background 2 x position")
+        yield UInt16(self, "bg3scrolx", "background 3 x position")
+        yield UInt16(self, "bg4scrolx", "background 4 x position")
+        yield UInt16(self, "bg1sx", "Temporary Variable for Debugging purposes")
+        yield UInt16(self, "bg1scroly", "background 1 y position")
+        yield UInt16(self, "bg2scroly", "background 2 y position")
+        yield UInt16(self, "bg3scroly", "background 3 y position")
+        yield UInt16(self, "bg4scroly", "background 4 y position")
+        yield UInt16(self, "addrincr", "vram increment (2,64,128,256)")
+        yield UInt8(self, "vramincr", "0 = increment at 2118/2138, 1 = 2119,213A")
+        yield UInt8(self, "vramread", "0 = address set, 1 = already read once")
+        yield UInt32(self, "vramaddr", "vram address")
+
+        yield UInt16(self, "cgaddr", "cg (palette)")
+        yield UInt8(self, "cgmod", "if cgram is modified or not")
+        yield UInt16(self, "scrnon", "main & sub screen on")
+        yield UInt8(self, "scrndist", "which background is disabled")
+        yield UInt16(self, "resolutn", "screen resolution")
+        yield UInt8(self, "multa", "multiplier A")
+        yield UInt16(self, "diva", "divisor C")
+        yield UInt16(self, "divres", "quotent of divc/divb")
+        yield UInt16(self, "multres", "result of multa * multb/remainder of divc/divb")
+        yield UInt16(self, "latchx", "latched x value")
+        yield UInt16(self, "latchy", "latched y value")
+        yield UInt8(self, "latchxr", "low or high byte read for x value")
+        yield UInt8(self, "latchyr", "low or high byte read for y value")
+        yield UInt8(self, "frskipper", "used to control frame skipping")
+        yield UInt8(self, "winl1", "window 1 left position")
+        yield UInt8(self, "winr1", "window 1 right position")
+        yield UInt8(self, "winl2", "window 2 left position")
+        yield UInt8(self, "winr2", "window 2 right position")
+        yield UInt8(self, "winbg1en", "Win1 on (IN/OUT) or Win2 on (IN/OUT) on BG1")
+        yield UInt8(self, "winbg2en", "Win1 on (IN/OUT) or Win2 on (IN/OUT) on BG2")
+        yield UInt8(self, "winbg3en", "Win1 on (IN/OUT) or Win2 on (IN/OUT) on BG3")
+        yield UInt8(self, "winbg4en", "Win1 on (IN/OUT) or Win2 on (IN/OUT) on BG4")
+        yield UInt8(self, "winobjen", "Win1 on (IN/OUT) or Win2 on (IN/OUT) on sprites")
+        yield UInt8(self, "wincolen", "Win1 on (IN/OUT) or Win2 on (IN/OUT) on backarea")
+        yield UInt8(self, "winlogica", "Window logic type for BG1 to 4")
+        yield UInt8(self, "winlogicb", "Window logic type for Sprites and Backarea")
+        yield UInt8(self, "winenabm", "Window logic enable for main screen")
+        yield UInt8(self, "winenabs", "Window logic enable for sub sceen")
+        yield UInt8(self, "mode7set", "mode 7 settings")
+        yield UInt16(self, "mode7A", "A value for Mode 7")
+        yield UInt16(self, "mode7B", "B value for Mode 7")
+        yield UInt16(self, "mode7C", "C value for Mode 7")
+        yield UInt16(self, "mode7D", "D value for Mode 7")
+        yield UInt16(self, "mode7X0", "Center X for Mode 7")
+        yield UInt16(self, "mode7Y0", "Center Y for Mode 7")
+        yield UInt8(self, "JoyAPos", "Old-Style Joystick Read Position for Joy 1 & 3")
+        yield UInt8(self, "JoyBPos", "Old-Style Joystick Read Position for Joy 2 & 4")
+        yield UInt32(self, "compmult", "Complement Multiplication for Mode 7")
+        yield UInt8(self, "joyalt", "temporary joystick alternation")
+        yield UInt32(self, "wramrwadr", "continuous read/write to wram address")
+        yield RawBytes(self, "dmadata", 129, "dma data (written from ports 43xx)")
+        yield UInt8(self, "irqon", "if IRQ has been called (80h) or not (0)")
+        yield UInt8(self, "nexthdma", "HDMA data to execute once vblank ends")
+        yield UInt8(self, "curhdma", "Currently executed hdma")
+        yield RawBytes(self, "hdmadata", 152, "4 dword register addresses, # bytes to transfer/line, address increment (word)")
+        yield UInt8(self, "hdmatype", "if first time executing hdma or not")
+        yield UInt8(self, "coladdr", "red value of color to add")
+        yield UInt8(self, "coladdg", "green value of color to add")
+        yield UInt8(self, "coladdb", "blue value of color to add")
+        yield UInt8(self, "colnull", "keep this 0 (when accessing colors by dword)")
+        yield UInt8(self, "scaddset", "screen/fixed color addition settings")
+        yield UInt8(self, "scaddtype", "which screen to add/sub")
+        yield UInt8(self, "Voice0Disabl2", "Disable Voice 0")
+        yield UInt8(self, "Voice1Disabl2", "Disable Voice 1")
+        yield UInt8(self, "Voice2Disabl2", "Disable Voice 2")
+        yield UInt8(self, "Voice3Disabl2", "Disable Voice 3")
+        yield UInt8(self, "Voice4Disabl2", "Disable Voice 4")
+        yield UInt8(self, "Voice5Disabl2", "Disable Voice 5")
+        yield UInt8(self, "Voice6Disabl2", "Disable Voice 6")
+        yield UInt8(self, "Voice7Disabl2", "Disable Voice 7")
+        yield RawBytes(self, "oamram", 1024, "OAMRAM (544 bytes)")
+        yield RawBytes(self, "cgram", 512, "CGRAM")
+        yield RawBytes(self, "pcgram", 512, "Previous CGRAM")
+        yield UInt8(self, "vraminctype")
+        yield UInt8(self, "vramincby8on", "if increment by 8 is on")
+        yield UInt8(self, "vramincby8left", "how many left")
+        yield UInt8(self, "vramincby8totl", "how many in total (32,64,128)")
+        yield UInt8(self, "vramincby8rowl", "how many left in that row (start at 8)")
+        yield UInt16(self, "vramincby8ptri", "increment by how many when rowl = 0")
+        yield UInt8(self, "nexthprior")
+        yield UInt8(self, "doirqnext")
+        yield UInt16(self, "vramincby8var")
+        yield UInt8(self, "screstype")
+        yield UInt8(self, "extlatch")
+        yield UInt8(self, "cfield")
+        yield UInt8(self, "interlval")
+        yield UInt16(self, "HIRQLoc HIRQ X")
+
+        # NEWer ZST format
+        yield UInt8(self, "KeyOnStA")
+        yield UInt8(self, "KeyOnStB")
+        yield UInt8(self, "SDD1BankA")
+        yield UInt8(self, "SDD1BankB")
+        yield UInt8(self, "SDD1BankC")
+        yield UInt8(self, "SDD1BankD")
+        yield UInt8(self, "vramread2")
+        yield UInt8(self, "nosprincr")
+        yield UInt16(self, "poamaddrs")
+        yield UInt8(self, "ioportval")
+        yield UInt8(self, "iohvlatch")
+        yield UInt8(self, "ppustatus")
+
+        yield PaddingBytes(self, "tempdat", 477, "Reserved/Unused")
+
class ZSNESFile(Parser):
    """ZSNES Save State (ZST) file parser, supporting format version 143 only."""
    PARSER_TAGS = {
        "id": "zsnes",
        "category": "game",
        "description": "ZSNES Save State File (only version 143)",
        "min_size": 3091*8,
        "file_ext": ("zst", "zs1", "zs2", "zs3", "zs4", "zs5", "zs6",
            "zs7", "zs8", "zs9")
    }
    endian = LITTLE_ENDIAN

    def validate(self):
        """
        Check the 26-byte magic string and the save version byte.
        Returns True on success, or an error message string on failure.
        """
        temp = self.stream.readBytes(0,28)
        if temp[0:26] != "ZSNES Save State File V143":
            return "Wrong header"
        version = ord(temp[27:28]) # extra...
        if version != 143:
            # BUGFIX: the original formatted the empty slice temp[27:1]
            # with %d, which raised TypeError instead of returning the
            # intended error message.
            return "Wrong save version %d <> 143" % version
        return True

    def seek(self, offset):
        """Yield a padding field up to absolute byte *offset*, if any gap exists."""
        padding = self.seekByte(offset, relative=False)
        if padding is not None:
            yield padding

    def createFields(self):
        yield ZSTHeader(self, "header", "ZST header") # Offset: 0
        yield ZSTcpu(self, "cpu", "ZST cpu registers") # 41
        yield ZSTppu(self, "ppu", "ZST CPU registers") # 72
        yield RawBytes(self, "wram7E", 65536) # 3091
        yield RawBytes(self, "wram7F", 65536) # 68627
        yield RawBytes(self, "vram", 65536) # 134163

        # TODO: Interpret extra on-cart chip data found at/beyond... 199699

        # TODO: Interpret Thumbnail/Screenshot data found at 275291
        # 64*56*2(16bit colors) = 7168
        padding = self.seekByte(275291, relative=False)
        if padding is not None:
            yield padding
        yield Bytes(self, "thumbnail", 7168, "Thumbnail of playing game in some sort of raw 64x56x16-bit RGB mode?")
+
diff --git a/lib/hachoir_parser/guess.py b/lib/hachoir_parser/guess.py
new file mode 100644
index 0000000000000000000000000000000000000000..1f77b48b5f2ad1f614a7d3ba380d9bdccdac35d4
--- /dev/null
+++ b/lib/hachoir_parser/guess.py
@@ -0,0 +1,123 @@
+"""
+Parser list management:
+- createParser() find the best parser for a file.
+"""
+
+import os
+from hachoir_core.error import warning, info, HACHOIR_ERRORS
+from hachoir_parser import ValidateError, HachoirParserList
+from hachoir_core.stream import FileInputStream
+from hachoir_core.i18n import _
+
+
class QueryParser(object):
    """
    Select candidate parsers from the parser database using a list of
    tags (e.g. ("file_ext", "bmp")), then try them on an input stream.
    """
    # Class-level defaults, overridden per instance in __init__()/parse().
    fallback = None
    other = None

    def __init__(self, tags):
        self.validate = True
        self.use_fallback = False
        self.parser_args = None
        self.db = HachoirParserList.getInstance()
        # Parsers not yet selected by any tag; shrinks as tags match.
        self.parsers = set(self.db)
        parsers = []
        for tag in tags:
            if not self.parsers:
                break
            parsers += self._getByTag(tag)
            # fallback is True when the first tag selected exactly one
            # parser: that parser may be forced later (see parse()).
            if self.fallback is None:
                self.fallback = len(parsers) == 1
        if self.parsers:
            # Remember the first parser that was NOT selected by a tag:
            # from that one on, skip messages are logged at info level.
            other = len(parsers)
            parsers += list(self.parsers)
            self.other = parsers[other]
        self.parsers = parsers

    def __iter__(self):
        return iter(self.parsers)

    def translate(self, name, value):
        """Translate a ("filename", path) tag into a ("file_ext", ext) tag."""
        if name == "filename":
            filename = os.path.basename(value).split(".")
            if len(filename) <= 1:
                value = ""
            else:
                value = filename[-1].lower()
            name = "file_ext"
        return name, value

    def _getByTag(self, tag):
        """
        Return the parsers matching one tag and remove them from
        self.parsers. A tag may be None (clear all candidates), a
        callable (predicate on the parser class), or a (name, value)
        pair looked up in the database's bytag index.
        """
        if tag is None:
            self.parsers.clear()
            return []
        elif callable(tag):
            parsers = [ parser for parser in self.parsers if tag(parser) ]
            for parser in parsers:
                self.parsers.remove(parser)
        elif tag[0] == "class":
            # An explicit parser class: use it without validation.
            self.validate = False
            return [ tag[1] ]
        elif tag[0] == "args":
            # Extra attributes to set on the parser instance (see parse()).
            self.parser_args = tag[1]
            return []
        else:
            tag = self.translate(*tag)
            parsers = []
            if tag is not None:
                key = tag[0]
                byname = self.db.bytag.get(key,{})
                if tag[1] is None:
                    # No value given: match every parser declaring this tag.
                    values = byname.itervalues()
                else:
                    values = byname.get(tag[1],()),
                if key == "id" and values:
                    # Parser requested by its unique id: skip validation.
                    self.validate = False
                for value in values:
                    for parser in value:
                        if parser in self.parsers:
                            parsers.append(parser)
                            self.parsers.remove(parser)
        return parsers

    def parse(self, stream, fallback=True):
        """
        Instantiate each candidate parser on the stream and return the
        first one that succeeds, or None. When every parser fails and
        self.use_fallback is set, the single tag-selected parser (if
        any) is forced without validation.
        """
        fb = None
        warn = warning
        for parser in self.parsers:
            try:
                parser_obj = parser(stream, validate=self.validate)
                if self.parser_args:
                    for key, value in self.parser_args.iteritems():
                        setattr(parser_obj, key, value)
                return parser_obj
            except ValidateError, err:
                res = unicode(err)
                if fallback and self.fallback:
                    fb = parser
            except HACHOIR_ERRORS, err:
                res = unicode(err)
            if warn:
                if parser == self.other:
                    # Past the tag-selected parsers: demote messages to info.
                    warn = info
                warn(_("Skip parser '%s': %s") % (parser.__name__, res))
            fallback = False
        if self.use_fallback and fb:
            warning(_("Force use of parser '%s'") % fb.__name__)
            return fb(stream)
+
+
def guessParser(stream):
    """Find and instantiate the best parser for *stream* using its tags."""
    query = QueryParser(stream.tags)
    return query.parse(stream)
+
+
def createParser(filename, real_filename=None, tags=None):
    """
    Create a parser from a file or returns None on error.

    Options:
    - filename (unicode): Input file name ;
    - real_filename (str|unicode): Real file name ;
    - tags (list): Extra (name, value) stream tags used to select the parser.
    """
    if not tags:
        tags = []
    stream = FileInputStream(filename, real_filename, tags=tags)
    return guessParser(stream)
diff --git a/lib/hachoir_parser/image/__init__.py b/lib/hachoir_parser/image/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..78c9c20bc10ffa4efa8a9e78fd6b18d3a9a6c7ac
--- /dev/null
+++ b/lib/hachoir_parser/image/__init__.py
@@ -0,0 +1,12 @@
+from hachoir_parser.image.bmp import BmpFile
+from hachoir_parser.image.gif import GifFile
+from hachoir_parser.image.ico import IcoFile
+from hachoir_parser.image.jpeg import JpegFile
+from hachoir_parser.image.pcx import PcxFile
+from hachoir_parser.image.psd import PsdFile
+from hachoir_parser.image.png import PngFile
+from hachoir_parser.image.tga import TargaFile
+from hachoir_parser.image.tiff import TiffFile
+from hachoir_parser.image.wmf import WMF_File
+from hachoir_parser.image.xcf import XcfFile
+
diff --git a/lib/hachoir_parser/image/bmp.py b/lib/hachoir_parser/image/bmp.py
new file mode 100644
index 0000000000000000000000000000000000000000..c4865d3bb1145d79e9ce6906f5acfc2059ac1fe2
--- /dev/null
+++ b/lib/hachoir_parser/image/bmp.py
@@ -0,0 +1,195 @@
+"""
+Microsoft Bitmap picture parser.
+- file extension: ".bmp"
+
+Author: Victor Stinner
+Creation: 16 december 2005
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    UInt8, UInt16, UInt32, Bits,
+    String, RawBytes, Enum,
+    PaddingBytes, NullBytes, createPaddingField)
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal
+from hachoir_parser.image.common import RGB, PaletteRGBA
+from hachoir_core.tools import alignValue
+
class Pixel4bit(Bits):
    # 4-bit pixel value (palette index), used by 16-color bitmaps.
    static_size = 4
    def __init__(self, parent, name):
        Bits.__init__(self, parent, name, 4)
+
class ImageLine(FieldSet):
    # One scanline of pixels, padded to a 32-bit boundary (BMP row alignment).
    def __init__(self, parent, name, width, pixel_class):
        FieldSet.__init__(self, parent, name)
        self._pixel = pixel_class
        self._width = width
        # Row size in bits, rounded up to a multiple of 32 bits.
        self._size = alignValue(self._width * self._pixel.static_size, 32)

    def createFields(self):
        for x in xrange(self._width):
            yield self._pixel(self, "pixel[]")
        # Alignment padding at the end of the row, if any.
        size = self.size - self.current_size
        if size:
            yield createPaddingField(self, size)
+
class ImagePixels(FieldSet):
    # Whole pixel array; lines are emitted from height-1 down to 0
    # because BMP stores rows bottom-up.
    def __init__(self, parent, name, width, height, pixel_class, size=None):
        FieldSet.__init__(self, parent, name, size=size)
        self._width = width
        self._height = height
        self._pixel = pixel_class

    def createFields(self):
        for y in xrange(self._height-1, -1, -1):
            yield ImageLine(self, "line[%u]" % y, self._width, self._pixel)
        # Trailing bytes not covered by any line, when a size was imposed.
        size = (self.size - self.current_size) // 8
        if size:
            yield NullBytes(self, "padding", size)
+
class CIEXYZ(FieldSet):
    """CIE XYZ color space point: three 32-bit unsigned coordinates."""
    def createFields(self):
        for coordinate in ("x", "y", "z"):
            yield UInt32(self, coordinate)
+
class BmpHeader(FieldSet):
    """
    BMP information header. Three layouts exist -- version 2 (12 bytes),
    version 3 (40 bytes), version 4 (108 bytes) -- selected at parse
    time from the header_size field.
    """
    color_space_name = {
        1: "Business (Saturation)",
        2: "Graphics (Relative)",
        4: "Images (Perceptual)",
        # FIX: display-string typo "colormetric" -> "colorimetric"
        8: "Absolute colorimetric (Absolute)",
    }

    def getFormatVersion(self):
        """Deduce the header version from which fields were actually parsed."""
        if "gamma_blue" in self:
            return 4
        if "important_color" in self:
            return 3
        return 2

    def createFields(self):
        # Version 2 (12 bytes)
        yield UInt32(self, "header_size", "Header size")
        yield UInt32(self, "width", "Width (pixels)")
        yield UInt32(self, "height", "Height (pixels)")
        yield UInt16(self, "nb_plan", "Number of plan (=1)")
        yield UInt16(self, "bpp", "Bits per pixel") # may be zero for PNG/JPEG picture

        # Version 3 (40 bytes)
        if self["header_size"].value < 40:
            return
        yield Enum(UInt32(self, "compression", "Compression method"), BmpFile.COMPRESSION_NAME)
        yield UInt32(self, "image_size", "Image size (bytes)")
        yield UInt32(self, "horizontal_dpi", "Horizontal DPI")
        yield UInt32(self, "vertical_dpi", "Vertical DPI")
        yield UInt32(self, "used_colors", "Number of color used")
        # FIX: description read "Number of import colors"
        yield UInt32(self, "important_color", "Number of important colors")

        # Version 4 (108 bytes)
        if self["header_size"].value < 108:
            return
        yield textHandler(UInt32(self, "red_mask"), hexadecimal)
        yield textHandler(UInt32(self, "green_mask"), hexadecimal)
        yield textHandler(UInt32(self, "blue_mask"), hexadecimal)
        yield textHandler(UInt32(self, "alpha_mask"), hexadecimal)
        yield Enum(UInt32(self, "color_space"), self.color_space_name)
        yield CIEXYZ(self, "red_primary")
        yield CIEXYZ(self, "green_primary")
        yield CIEXYZ(self, "blue_primary")
        yield UInt32(self, "gamma_red")
        yield UInt32(self, "gamma_green")
        yield UInt32(self, "gamma_blue")
+
def parseImageData(parent, name, size, header):
    """
    Create the pixel-data field: decoded pixels when the bitmap is
    uncompressed (0) or uses bitfields (3) at a supported depth,
    raw bytes otherwise.
    """
    if ("compression" not in header) or (header["compression"].value in (0, 3)):
        bpp = header["bpp"].value
        # Map supported bit depths to their per-pixel field class.
        pixel_class = {32: UInt32, 24: RGB, 8: UInt8, 4: Pixel4bit}.get(bpp)
        if pixel_class:
            return ImagePixels(parent, name, header["width"].value,
                header["height"].value, pixel_class, size=size*8)
    return RawBytes(parent, name, size)
+
class BmpFile(Parser):
    """Parser for Microsoft bitmap (BMP) pictures, header versions 2/3/4."""
    PARSER_TAGS = {
        "id": "bmp",
        "category": "image",
        "file_ext": ("bmp",),
        "mime": (u"image/x-ms-bmp", u"image/x-bmp"),
        "min_size": 30*8,
#        "magic": (("BM", 0),),
        "magic_regex": ((
            # "BM", <filesize>, <reserved>, header_size=(12|40|108)
            "BM.{4}.{8}[\x0C\x28\x6C]\0{3}",
        0),),
        "description": "Microsoft bitmap (BMP) picture"
    }
    endian = LITTLE_ENDIAN

    COMPRESSION_NAME = {
        0: u"Uncompressed",
        1: u"RLE 8-bit",
        2: u"RLE 4-bit",
        3: u"Bitfields",
        4: u"JPEG",
        5: u"PNG",
    }

    def validate(self):
        """Check the "BM" signature, a known header size and the plane count."""
        if self.stream.readBytes(0, 2) != 'BM':
            return "Wrong file signature"
        if self["header/header_size"].value not in (12, 40, 108):
            # BUGFIX: the original read self["header_size"] here, but the
            # field lives at "header/header_size" -- building the error
            # message raised MissingField instead of returning it.
            return "Unknown header size (%s)" % self["header/header_size"].value
        if self["header/nb_plan"].value != 1:
            return "Invalid number of planes"
        return True

    def createFields(self):
        yield String(self, "signature", 2, "Header (\"BM\")", charset="ASCII")
        yield UInt32(self, "file_size", "File size (bytes)")
        yield PaddingBytes(self, "reserved", 4, "Reserved")
        yield UInt32(self, "data_start", "Data start position")
        yield BmpHeader(self, "header")

        # Compute number of color
        header = self["header"]
        bpp = header["bpp"].value
        if 0 < bpp <= 8:
            if "used_colors" in header and header["used_colors"].value:
                nb_color = header["used_colors"].value
            else:
                # Default palette size: all 2^bpp entries.
                nb_color = (1 << bpp)
        else:
            nb_color = 0

        # Color palette (if any)
        if nb_color:
            yield PaletteRGBA(self, "palette", nb_color)

        # Seek to data start
        field = self.seekByte(self["data_start"].value)
        if field:
            yield field

        # Image pixels: bounded both by the declared file size and by the
        # actual stream size.
        size = min(self["file_size"].value-self["data_start"].value, (self.size - self.current_size)//8)
        yield parseImageData(self, "pixels", size, header)

    def createDescription(self):
        return u"Microsoft Bitmap version %s" % self["header"].getFormatVersion()

    def createContentSize(self):
        # Content size in bits, taken from the declared file size.
        return self["file_size"].value * 8
+
diff --git a/lib/hachoir_parser/image/common.py b/lib/hachoir_parser/image/common.py
new file mode 100644
index 0000000000000000000000000000000000000000..5046058a7bad8b1b00d2f347bf134ca9a39a7bc3
--- /dev/null
+++ b/lib/hachoir_parser/image/common.py
@@ -0,0 +1,49 @@
+from hachoir_core.field import FieldSet, UserVector, UInt8
+
class RGB(FieldSet):
    """24-bit RGB color, one byte per component."""
    # Well-known colors get a name in the description instead of a hex code.
    color_name = {
        (  0,   0,   0): "Black",
        (255,   0,   0): "Red",
        (  0, 255,   0): "Green",
        (  0,   0, 255): "Blue",
        (255, 255, 255): "White",
    }
    static_size = 24

    def createFields(self):
        for component in ("red", "green", "blue"):
            yield UInt8(self, component, component.capitalize())

    def createDescription(self):
        rgb = self["red"].value, self["green"].value, self["blue"].value
        return "RGB color: " + self.color_name.get(rgb, "#%02X%02X%02X" % rgb)
+
class RGBA(RGB):
    """32-bit RGBA color: RGB plus one alpha (opacity) byte."""
    static_size = 32

    def createFields(self):
        for component in ("red", "green", "blue", "alpha"):
            yield UInt8(self, component, component.capitalize())

    def createDescription(self):
        # Integer percentage (Python 2 integer division), appended to the
        # base RGB description.
        opacity = self["alpha"].value*100/255
        return "%s (opacity: %s%%)" % (RGB.createDescription(self), opacity)
+
class PaletteRGB(UserVector):
    # Fixed-length vector of RGB colors; the length is given by the caller.
    item_class = RGB
    item_name = "color"
    def createDescription(self):
        return "Palette of %u RGB colors" % len(self)
+
class PaletteRGBA(PaletteRGB):
    # Same as PaletteRGB, but each color carries an alpha byte.
    item_class = RGBA
    def createDescription(self):
        return "Palette of %u RGBA colors" % len(self)
+
diff --git a/lib/hachoir_parser/image/exif.py b/lib/hachoir_parser/image/exif.py
new file mode 100644
index 0000000000000000000000000000000000000000..7b867933310f3b7aa2b860f150396c6ef7b3683a
--- /dev/null
+++ b/lib/hachoir_parser/image/exif.py
@@ -0,0 +1,361 @@
+"""
+EXIF metadata parser (can be found in a JPEG picture for example)
+
+Author: Victor Stinner
+"""
+
+from hachoir_core.field import (FieldSet, ParserError,
+    UInt8, UInt16, UInt32,
+    Int32, Enum, String,
+    Bytes, SubFile,
+    NullBytes, createPaddingField)
+from hachoir_core.endian import LITTLE_ENDIAN, BIG_ENDIAN, NETWORK_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal
+from hachoir_core.tools import createDict
+
+MAX_COUNT = 1000
+
def rationalFactory(class_name, size, field_class):
    """
    Build a FieldSet subclass named *class_name* parsing a rational
    number stored as a numerator/denominator pair of *field_class*
    values (*size* bits in total); its value is the float quotient.
    """
    class Rational(FieldSet):
        static_size = size

        def createFields(self):
            yield field_class(self, "numerator")
            yield field_class(self, "denominator")

        def createValue(self):
            return float(self["numerator"].value) / self["denominator"].value
    Rational.__name__ = class_name
    return Rational

RationalInt32 = rationalFactory("RationalInt32", 64, Int32)
RationalUInt32 = rationalFactory("RationalUInt32", 64, UInt32)
+
class BasicIFDEntry(FieldSet):
    """
    One entry of an IFD (Image File Directory): tag, type, count, then
    either an inline value (when it fits in the 4-byte slot) or an
    offset to the out-of-line value.
    """
    TYPE_BYTE = 0
    TYPE_UNDEFINED = 7
    TYPE_RATIONAL = 5
    TYPE_SIGNED_RATIONAL = 10
    # type id => (field class, human-readable type name)
    TYPE_INFO = {
         1: (UInt8, "BYTE (8 bits)"),
         2: (String, "ASCII (8 bits)"),
         3: (UInt16, "SHORT (16 bits)"),
         4: (UInt32, "LONG (32 bits)"),
         5: (RationalUInt32, "RATIONAL (2x LONG, 64 bits)"),
         7: (Bytes, "UNDEFINED (8 bits)"),
         9: (Int32, "SIGNED LONG (32 bits)"),
        10: (RationalInt32, "SRATIONAL (2x SIGNED LONGs, 64 bits)"),
    }
    ENTRY_FORMAT = createDict(TYPE_INFO, 0)
    TYPE_NAME = createDict(TYPE_INFO, 1)

    def createFields(self):
        yield Enum(textHandler(UInt16(self, "tag", "Tag"), hexadecimal), self.TAG_NAME)
        yield Enum(textHandler(UInt16(self, "type", "Type"), hexadecimal), self.TYPE_NAME)
        yield UInt32(self, "count", "Count")
        # Reject absurd counts for fixed-size types (likely corrupt data).
        if self["type"].value not in (self.TYPE_BYTE, self.TYPE_UNDEFINED) \
        and  MAX_COUNT < self["count"].value:
            raise ParserError("EXIF: Invalid count value (%s)" % self["count"].value)
        value_size, array_size = self.getSizes()

        # Get offset/value
        if not value_size:
            yield NullBytes(self, "padding", 4)
        elif value_size <= 32:
            # Value(s) small enough to be stored inline in the 4-byte slot.
            if 1 < array_size:
                name = "value[]"
            else:
                name = "value"
            kw = {}
            cls = self.value_cls
            if cls is String:
                args = (self, name, value_size/8, "Value")
                kw["strip"] = " \0"
                kw["charset"] = "ISO-8859-1"
            elif cls is Bytes:
                args = (self, name, value_size/8, "Value")
            else:
                args = (self, name, "Value")
            for index in xrange(array_size):
                yield cls(*args, **kw)

            # Pad out the rest of the 4-byte slot when the value is smaller.
            size = array_size * value_size
            if size < 32:
                yield NullBytes(self, "padding", (32-size)//8)
        else:
            # Value doesn't fit inline: the slot holds an offset instead.
            yield UInt32(self, "offset", "Value offset")

    def getSizes(self):
        """
        Returns (value_size, array_size): value_size in bits and
        array_size in number of items.
        """
        # Create format
        self.value_cls = self.ENTRY_FORMAT.get(self["type"].value, Bytes)

        # Set size
        count = self["count"].value
        if self.value_cls in (String, Bytes):
            return 8 * count, 1
        else:
            return self.value_cls.static_size * count, count
+
class ExifEntry(BasicIFDEntry):
    """
    IFD entry specialised for EXIF: adds the EXIF/GPS tag constants and
    the tag-id => description table used for display.
    """
    OFFSET_JPEG_SOI = 0x0201
    EXIF_IFD_POINTER = 0x8769

    TAG_WIDTH = 0xA002
    TAG_HEIGHT = 0xA003

    TAG_GPS_LATITUDE_REF = 0x0001
    TAG_GPS_LATITUDE = 0x0002
    TAG_GPS_LONGITUDE_REF = 0x0003
    TAG_GPS_LONGITUDE = 0x0004
    TAG_GPS_ALTITUDE_REF = 0x0005
    TAG_GPS_ALTITUDE = 0x0006
    TAG_GPS_TIMESTAMP = 0x0007
    TAG_GPS_DATESTAMP = 0x001d

    TAG_IMG_TITLE = 0x010e
    TAG_FILE_TIMESTAMP = 0x0132
    TAG_SOFTWARE = 0x0131
    TAG_CAMERA_MODEL = 0x0110
    TAG_CAMERA_MANUFACTURER = 0x010f
    TAG_ORIENTATION = 0x0112
    TAG_EXPOSURE = 0x829A
    TAG_FOCAL = 0x829D
    TAG_BRIGHTNESS = 0x9203
    TAG_APERTURE = 0x9205
    TAG_USER_COMMENT = 0x9286

    TAG_NAME = {
        # GPS
        0x0000: "GPS version ID",
        0x0001: "GPS latitude ref",
        0x0002: "GPS latitude",
        0x0003: "GPS longitude ref",
        0x0004: "GPS longitude",
        0x0005: "GPS altitude ref",
        0x0006: "GPS altitude",
        0x0007: "GPS timestamp",
        0x0008: "GPS satellites",
        0x0009: "GPS status",
        0x000a: "GPS measure mode",
        0x000b: "GPS DOP",
        0x000c: "GPS speed ref",
        0x000d: "GPS speed",
        0x000e: "GPS track ref",
        0x000f: "GPS track",
        0x0010: "GPS img direction ref",
        0x0011: "GPS img direction",
        0x0012: "GPS map datum",
        0x0013: "GPS dest latitude ref",
        0x0014: "GPS dest latitude",
        0x0015: "GPS dest longitude ref",
        0x0016: "GPS dest longitude",
        0x0017: "GPS dest bearing ref",
        0x0018: "GPS dest bearing",
        0x0019: "GPS dest distance ref",
        0x001a: "GPS dest distance",
        0x001b: "GPS processing method",
        0x001c: "GPS area information",
        0x001d: "GPS datestamp",
        0x001e: "GPS differential",

        0x0100: "Image width",
        0x0101: "Image height",
        0x0102: "Number of bits per component",
        0x0103: "Compression scheme",
        0x0106: "Pixel composition",
        TAG_ORIENTATION: "Orientation of image",
        0x0115: "Number of components",
        0x011C: "Image data arrangement",
        0x0212: "Subsampling ratio Y to C",
        0x0213: "Y and C positioning",
        0x011A: "Image resolution width direction",
        0x011B: "Image resolution in height direction",
        0x0128: "Unit of X and Y resolution",

        0x0111: "Image data location",
        0x0116: "Number of rows per strip",
        0x0117: "Bytes per compressed strip",
        0x0201: "Offset to JPEG SOI",
        0x0202: "Bytes of JPEG data",

        0x012D: "Transfer function",
        0x013E: "White point chromaticity",
        0x013F: "Chromaticities of primaries",
        0x0211: "Color space transformation matrix coefficients",
        # FIX: display-string typo "blank and white" -> "black and white"
        # (EXIF tag 0x0214 is ReferenceBlackWhite)
        0x0214: "Pair of black and white reference values",

        TAG_FILE_TIMESTAMP: "File change date and time",
        TAG_IMG_TITLE: "Image title",
        TAG_CAMERA_MANUFACTURER: "Camera (Image input equipment) manufacturer",
        # FIX: display-string typo "(Input input equipment)"
        TAG_CAMERA_MODEL: "Camera (Image input equipment) model",
        TAG_SOFTWARE: "Software",
        0x013B: "File change date and time",
        0x8298: "Copyright holder",
        0x8769: "Exif IFD Pointer",

        TAG_EXPOSURE: "Exposure time",
        TAG_FOCAL: "F number",
        0x8822: "Exposure program",
        0x8824: "Spectral sensitivity",
        0x8827: "ISO speed rating",
        0x8828: "Optoelectric conversion factor OECF",
        0x9201: "Shutter speed",
        0x9202: "Aperture",
        TAG_BRIGHTNESS: "Brightness",
        0x9204: "Exposure bias",
        TAG_APERTURE: "Maximum lens aperture",
        0x9206: "Subject distance",
        0x9207: "Metering mode",
        0x9208: "Light source",
        0x9209: "Flash",
        0x920A: "Lens focal length",
        0x9214: "Subject area",
        0xA20B: "Flash energy",
        0xA20C: "Spatial frequency response",
        0xA20E: "Focal plane X resolution",
        0xA20F: "Focal plane Y resolution",
        0xA210: "Focal plane resolution unit",
        0xA214: "Subject location",
        0xA215: "Exposure index",
        0xA217: "Sensing method",
        0xA300: "File source",
        0xA301: "Scene type",
        0xA302: "CFA pattern",
        0xA401: "Custom image processing",
        0xA402: "Exposure mode",
        0xA403: "White balance",
        0xA404: "Digital zoom ratio",
        0xA405: "Focal length in 35 mm film",
        0xA406: "Scene capture type",
        0xA407: "Gain control",
        0xA408: "Contrast",

        0x9000: "Exif version",
        0xA000: "Supported Flashpix version",
        0xA001: "Color space information",
        0x9101: "Meaning of each component",
        0x9102: "Image compression mode",
        TAG_WIDTH: "Valid image width",
        TAG_HEIGHT: "Valid image height",
        0x927C: "Manufacturer notes",
        TAG_USER_COMMENT: "User comments",
        0xA004: "Related audio file",
        0x9003: "Date and time of original data generation",
        0x9004: "Date and time of digital data generation",
        0x9290: "DateTime subseconds",
        0x9291: "DateTimeOriginal subseconds",
        0x9292: "DateTimeDigitized subseconds",
        0xA420: "Unique image ID",
        0xA005: "Interoperability IFD Pointer"
    }

    def createDescription(self):
        return "Entry: %s" % self["tag"].display
+
def sortExifEntry(a, b):
    """Comparison function ordering IFD entries by their value offset."""
    offset_a = a["offset"].value
    offset_b = b["offset"].value
    return int(offset_a - offset_b)
+
class ExifIFD(FieldSet):
    """
    One Image File Directory: an entry count, the entries, a next-IFD
    offset, then the out-of-line values the entries point at.
    """
    def seek(self, offset):
        """
        Seek to byte address relative to parent address.
        """
        padding = offset - (self.address + self.current_size)/8
        if 0 < padding:
            return createPaddingField(self, padding*8)
        else:
            return None

    def createFields(self):
        # Entry offsets are relative to the TIFF header, which starts
        # 6 bytes into the EXIF chunk (after the "Exif\0\0" signature).
        offset_diff = 6
        yield UInt16(self, "count", "Number of entries")
        entries = []
        next_chunk_offset = None
        count = self["count"].value
        if not count:
            return
        while count:
            # Peek at the next 32 bits: a null/terminator pattern means
            # the entry list ends early.
            addr = self.absolute_address + self.current_size
            next = self.stream.readBits(addr, 32, NETWORK_ENDIAN)
            if next in (0, 0xF0000000):
                break
            entry = ExifEntry(self, "entry[]")
            yield entry
            if entry["tag"].value in (ExifEntry.EXIF_IFD_POINTER, ExifEntry.OFFSET_JPEG_SOI):
                next_chunk_offset = entry["value"].value + offset_diff
            if 32 < entry.getSizes()[0]:
                # Value stored out of line: parse it after the entry list.
                entries.append(entry)
            count -= 1
        yield UInt32(self, "next", "Next IFD offset")
        # Emit out-of-line values in ascending file-offset order.
        try:
            entries.sort( sortExifEntry )
        except TypeError:
            raise ParserError("Unable to sort entries!")
        value_index = 0
        for entry in entries:
            padding = self.seek(entry["offset"].value + offset_diff)
            if padding is not None:
                yield padding

            value_size, array_size = entry.getSizes()
            if not array_size:
                continue
            cls = entry.value_cls
            if 1 < array_size:
                name = "value_%s[]" % entry.name
            else:
                name = "value_%s" % entry.name
            desc = "Value of \"%s\"" % entry["tag"].display
            if cls is String:
                for index in xrange(array_size):
                    yield cls(self, name, value_size/8, desc, strip=" \0", charset="ISO-8859-1")
            elif cls is Bytes:
                for index in xrange(array_size):
                    yield cls(self, name, value_size/8, desc)
            else:
                for index in xrange(array_size):
                    yield cls(self, name, desc)
            value_index += 1
        if next_chunk_offset is not None:
            padding = self.seek(next_chunk_offset)
            if padding is not None:
                yield padding

    def createDescription(self):
        # NOTE(review): createFields() never yields a field named "id";
        # this lookup looks like it would fail -- confirm against callers.
        return "Exif IFD (id %s)" % self["id"].value
+
class Exif(FieldSet):
    """
    EXIF metadata chunk: "Exif\\0\\0" signature, TIFF header (byte order,
    version, first IFD offset), a sequence of IFDs, and optionally a
    JPEG thumbnail.
    """
    def createFields(self):
        # Headers
        yield String(self, "header", 6, "Header (Exif\\0\\0)", charset="ASCII")
        if self["header"].value != "Exif\0\0":
            raise ParserError("Invalid EXIF signature!")
        yield String(self, "byte_order", 2, "Byte order", charset="ASCII")
        if self["byte_order"].value not in ("II", "MM"):
            raise ParserError("Invalid endian!")
        # "II" selects little endian, "MM" big endian; the choice applies
        # to all following fields of this field set.
        if self["byte_order"].value == "II":
           self.endian = LITTLE_ENDIAN
        else:
           self.endian = BIG_ENDIAN
        yield UInt16(self, "version", "TIFF version number")
        yield UInt32(self, "img_dir_ofs", "Next image directory offset")
        while not self.eof:
            # Peek 16 bits: 0xFFD8 is a JPEG SOI marker (embedded
            # thumbnail); 0xFFFF is treated as a terminator.
            addr = self.absolute_address + self.current_size
            tag = self.stream.readBits(addr, 16, NETWORK_ENDIAN)
            if tag == 0xFFD8:
                size = (self._size - self.current_size) // 8
                yield SubFile(self, "thumbnail", size, "Thumbnail (JPEG file)", mime_type="image/jpeg")
                break
            elif tag == 0xFFFF:
                break
            yield ExifIFD(self, "ifd[]", "IFD")
        padding = self.seekBit(self._size)
        if padding is not None:
            yield padding
+
+
diff --git a/lib/hachoir_parser/image/gif.py b/lib/hachoir_parser/image/gif.py
new file mode 100644
index 0000000000000000000000000000000000000000..c7e0b89c33ba4c1c0b451e6b9c91dee5dc6f3b98
--- /dev/null
+++ b/lib/hachoir_parser/image/gif.py
@@ -0,0 +1,227 @@
+"""
+GIF picture parser.
+
+Author: Victor Stinner
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, ParserError,
+    Enum, UInt8, UInt16,
+    Bit, Bits, NullBytes,
+    String, PascalString8, Character,
+    NullBits, RawBytes)
+from hachoir_parser.image.common import PaletteRGB
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.tools import humanDuration
+from hachoir_core.text_handler import textHandler, displayHandler, hexadecimal
+
+# Maximum image dimension (in pixel)
+MAX_WIDTH = 6000
+MAX_HEIGHT = MAX_WIDTH
+# Upper bound (in bytes) for the terminator search in createContentSize()
+MAX_FILE_SIZE = 100 * 1024 * 1024
+
+class Image(FieldSet):
+    """
+    GIF image descriptor: position/size, local flags, optional local
+    color map, then the LZW-compressed pixel data as length-prefixed
+    sub-blocks terminated by a zero-length block.
+    """
+    def createFields(self):
+        yield UInt16(self, "left", "Left")
+        yield UInt16(self, "top", "Top")
+        yield UInt16(self, "width", "Width")
+        yield UInt16(self, "height", "Height")
+
+        yield Bits(self, "bpp", 3, "Bits / pixel minus one")
+        yield NullBits(self, "nul", 2)
+        yield Bit(self, "sorted", "Sorted??")
+        yield Bit(self, "interlaced", "Interlaced?")
+        yield Bit(self, "has_local_map", "Use local color map?")
+
+        if self["has_local_map"].value:
+            # Local palette holds 2^(bpp+1) RGB entries.
+            nb_color = 1 << (1 + self["bpp"].value)
+            yield PaletteRGB(self, "local_map", nb_color, "Local color map")
+
+        yield UInt8(self, "code_size", "LZW Minimum Code Size")
+        # Image data: sequence of (length, bytes) sub-blocks;
+        # a zero length byte marks the end.
+        while True:
+            blen = UInt8(self, "block_len[]", "Block Length")
+            yield blen
+            if blen.value != 0:
+                yield RawBytes(self, "data[]", blen.value, "Image Data")
+            else:
+                break
+
+    def createDescription(self):
+        return "Image: %ux%u pixels at (%u,%u)" % (
+            self["width"].value, self["height"].value,
+            self["left"].value, self["top"].value)
+
+# Disposal methods of the GIF89a Graphic Control Extension
+DISPOSAL_METHOD = {
+    0: "No disposal specified",
+    1: "Do not dispose",
+    2: "Restore to background color",
+    3: "Restore to previous",
+}
+
+# Sub-block codes of the NETSCAPE2.0 application extension
+NETSCAPE_CODE = {
+    1: "Loop count",
+}
+
+def parseApplicationExtension(parent):
+    # Application extension (code 0xFF): application name, then one data
+    # sub-block.  The NETSCAPE2.0 loop-count extension is decoded
+    # specially; anything else is kept as raw bytes.
+    yield PascalString8(parent, "app_name", "Application name")
+    yield UInt8(parent, "size")
+    size = parent["size"].value
+    if parent["app_name"].value == "NETSCAPE2.0" and size == 3:
+        yield Enum(UInt8(parent, "netscape_code"), NETSCAPE_CODE)
+        if parent["netscape_code"].value == 1:
+            yield UInt16(parent, "loop_count")
+        else:
+            yield RawBytes(parent, "raw", 2)
+    else:
+        yield RawBytes(parent, "raw", size)
+    yield NullBytes(parent, "terminator", 1, "Terminator (0)")
+
+def parseGraphicControl(parent):
+    # Graphic Control Extension (code 0xF9): flags, frame delay,
+    # transparent color index.  Block size must be 4.
+    yield UInt8(parent, "size", "Block size (4)")
+
+    yield Bit(parent, "has_transp", "Has transparency")
+    yield Bit(parent, "user_input", "User input")
+    yield Enum(Bits(parent, "disposal_method", 3), DISPOSAL_METHOD)
+    yield NullBits(parent, "reserved[]", 3)
+
+    if parent["size"].value != 4:
+        raise ParserError("Invalid graphic control size")
+    # NOTE(review): the GIF89a spec stores the delay in 1/100 s, not
+    # milliseconds as the description claims -- confirm before relying
+    # on the displayed duration.
+    yield displayHandler(UInt16(parent, "delay", "Delay time in millisecond"), humanDuration)
+    yield UInt8(parent, "transp", "Transparent color index")
+    yield NullBytes(parent, "terminator", 1, "Terminator (0)")
+
+def parseComments(parent):
+    # Comment extension (code 0xFE): length-prefixed strings until an
+    # empty (zero-length) string terminates the list.
+    while True:
+        field = PascalString8(parent, "comment[]", strip=" \0\r\n\t")
+        yield field
+        if field.length == 0:
+            break
+
+def parseTextExtension(parent):
+    # Plain Text Extension (code 0x01): text grid geometry and colors,
+    # then length-prefixed strings until an empty string.
+    yield UInt8(parent, "block_size", "Block Size")
+    yield UInt16(parent, "left", "Text Grid Left")
+    yield UInt16(parent, "top", "Text Grid Top")
+    yield UInt16(parent, "width", "Text Grid Width")
+    yield UInt16(parent, "height", "Text Grid Height")
+    yield UInt8(parent, "cell_width", "Character Cell Width")
+    yield UInt8(parent, "cell_height", "Character Cell Height")
+    yield UInt8(parent, "fg_color", "Foreground Color Index")
+    yield UInt8(parent, "bg_color", "Background Color Index")
+    while True:
+        field = PascalString8(parent, "comment[]", strip=" \0\r\n\t")
+        yield field
+        if field.length == 0:
+            break
+
+def defaultExtensionParser(parent):
+    # Fallback for unknown extension codes: walk the generic
+    # (length, bytes) sub-block chain until a zero-length block.
+    while True:
+        size = UInt8(parent, "size[]", "Size (in bytes)")
+        yield size
+        if 0 < size.value:
+            yield RawBytes(parent, "content[]", size.value)
+        else:
+            break
+
+class Extension(FieldSet):
+    ext_code = {
+        0xf9: ("graphic_ctl[]", parseGraphicControl, "Graphic control"),
+        0xfe: ("comments[]", parseComments, "Comments"),
+        0xff: ("app_ext[]", parseApplicationExtension, "Application extension"),
+        0x01: ("text_ext[]", parseTextExtension, "Plain text extension")
+    }
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+        code = self["code"].value
+        if code in self.ext_code:
+            self._name, self.parser, self._description = self.ext_code[code]
+        else:
+            self.parser = defaultExtensionParser
+
+    def createFields(self):
+        yield textHandler(UInt8(self, "code", "Extension code"), hexadecimal)
+        for field in self.parser(self):
+            yield field
+
+    def createDescription(self):
+        return "Extension: function %s" % self["func"].display
+
+class ScreenDescriptor(FieldSet):
+    """
+    GIF logical screen descriptor: canvas size, color depth flags,
+    background color index and pixel aspect ratio.
+    """
+    def createFields(self):
+        yield UInt16(self, "width", "Width")
+        yield UInt16(self, "height", "Height")
+        yield Bits(self, "bpp", 3, "Bits per pixel minus one")
+        yield Bit(self, "reserved", "(reserved)")
+        yield Bits(self, "color_res", 3, "Color resolution minus one")
+        yield Bit(self, "global_map", "Has global map?")
+        yield UInt8(self, "background", "Background color")
+        yield UInt8(self, "pixel_aspect_ratio", "Pixel Aspect Ratio")
+
+    def createDescription(self):
+        # 2^(bpp+1) colors in the global palette
+        colors = 1 << (self["bpp"].value+1)
+        return "Screen descriptor: %ux%u pixels %u colors" \
+            % (self["width"].value, self["height"].value, colors)
+
+class GifFile(Parser):
+    """
+    Top-level GIF parser: header ("GIF" + version), screen descriptor,
+    optional global color map, then a stream of blocks introduced by a
+    one-character separator ('!' extension, ',' image, ';' terminator).
+    """
+    endian = LITTLE_ENDIAN
+    separator_name = {
+        "!": "Extension",
+        ",": "Image",
+        ";": "Terminator"
+    }
+    PARSER_TAGS = {
+        "id": "gif",
+        "category": "image",
+        "file_ext": ("gif",),
+        "mime": (u"image/gif",),
+        "min_size": (6 + 7 + 1 + 9)*8,   # signature + screen + separator + image
+        "magic": (("GIF87a", 0), ("GIF89a", 0)),
+        "description": "GIF picture"
+    }
+
+    def validate(self):
+        # Check magic and sanity-check the screen dimensions.
+        if self.stream.readBytes(0, 6) not in ("GIF87a", "GIF89a"):
+            return "Wrong header"
+        if self["screen/width"].value == 0 or self["screen/height"].value == 0:
+            return "Invalid image size"
+        if MAX_WIDTH < self["screen/width"].value:
+            return "Image width too big (%u)" % self["screen/width"].value
+        if MAX_HEIGHT < self["screen/height"].value:
+            return "Image height too big (%u)" % self["screen/height"].value
+        return True
+
+    def createFields(self):
+        # Header
+        yield String(self, "magic", 3, "File magic code", charset="ASCII")
+        yield String(self, "version", 3, "GIF version", charset="ASCII")
+
+        yield ScreenDescriptor(self, "screen")
+        if self["screen/global_map"].value:
+            bpp = (self["screen/bpp"].value+1)
+            yield PaletteRGB(self, "color_map", 1 << bpp, "Color map")
+            self.color_map = self["color_map"]
+        else:
+            self.color_map = None
+
+        self.images = []
+        # Block stream: dispatch on the one-character separator until
+        # the GIF trailer (';') is reached.
+        while True:
+            code = Enum(Character(self, "separator[]", "Separator code"), self.separator_name)
+            yield code
+            code = code.value
+            if code == "!":
+                yield Extension(self, "extensions[]")
+            elif code == ",":
+                yield Image(self, "image[]")
+            elif code == ";":
+                # GIF Terminator
+                break
+            else:
+                raise ParserError("Wrong GIF image separator: 0x%02X" % ord(code))
+
+    def createContentSize(self):
+        # Search for the "\0;" sequence (zero-length sub-block followed
+        # by the GIF trailer) after the first image; +16 bits to include
+        # both bytes.  searchBytes() returns None when not found.
+        field = self["image[0]"]
+        start = field.absolute_address + field.size
+        end = start + MAX_FILE_SIZE*8
+        pos = self.stream.searchBytes("\0;", start, end)
+        if pos:
+            return pos + 16
+        return None
diff --git a/lib/hachoir_parser/image/ico.py b/lib/hachoir_parser/image/ico.py
new file mode 100644
index 0000000000000000000000000000000000000000..193a81c62cec9ea28c6ae6b976ef081c3f9afee6
--- /dev/null
+++ b/lib/hachoir_parser/image/ico.py
@@ -0,0 +1,139 @@
+"""
+Microsoft Windows icon and cursor file format parser.
+
+Author: Victor Stinner
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, ParserError,
+    UInt8, UInt16, UInt32, Enum, RawBytes)
+from hachoir_parser.image.common import PaletteRGBA
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_parser.common.win32 import BitmapInfoHeader
+
+class IconHeader(FieldSet):
+    """
+    One directory entry of an ICO/CUR file: icon geometry, color depth
+    and the offset/size of its bitmap data.
+    """
+    def createFields(self):
+        yield UInt8(self, "width", "Width")
+        yield UInt8(self, "height", "Height")
+        yield UInt8(self, "nb_color", "Number of colors")
+        yield UInt8(self, "reserved", "(reserved)")
+        yield UInt16(self, "planes", "Color planes (=1)")
+        yield UInt16(self, "bpp", "Bits per pixel")
+        yield UInt32(self, "size", "Content size in bytes")
+        yield UInt32(self, "offset", "Data offset")
+
+    def createDescription(self):
+        return "Icon: %ux%u pixels, %u bits/pixel" % \
+            (self["width"].value, self["height"].value, self["bpp"].value)
+
+    def isValid(self):
+        # Heuristic plausibility check on (nb_color, planes, bpp)
+        # combinations; used by IcoFile.validate().  Branches without a
+        # "return True" fall through to the final bpp==0/planes==0 test.
+        if self["nb_color"].value == 0:
+            if self["bpp"].value in (8, 24, 32) and self["planes"].value == 1:
+                return True
+            if self["planes"].value == 4 and self["bpp"].value == 0:
+                return True
+        elif self["nb_color"].value == 16:
+            if self["bpp"].value in (4, 16) and self["planes"].value == 1:
+                return True
+        else:
+            return False
+        if self["bpp"].value == 0 and self["planes"].value == 0:
+            return True
+        return False
+
+class IconData(FieldSet):
+    """
+    Bitmap data of one icon: BITMAPINFOHEADER, optional palette, raw
+    pixels.  `header` is the matching IconHeader directory entry.
+    """
+    def __init__(self, parent, name, header):
+        FieldSet.__init__(self, parent, name, "Icon data")
+        # Keep the directory entry: it holds the color count and the
+        # total data size used below.
+        self.header = header
+
+    def createFields(self):
+        yield BitmapInfoHeader(self, "header")
+
+        # Read palette if needed
+        nb_color = self.header["nb_color"].value
+        if self.header["bpp"].value == 8:
+            nb_color = 256
+        if nb_color != 0:
+            yield PaletteRGBA(self, "palette", nb_color)
+
+        # Read pixels: declared size minus what the header and palette
+        # already consumed (current_size is in bits).
+        size = self.header["size"].value - self.current_size/8
+        yield RawBytes(self, "pixels", size, "Image pixels")
+
+class IcoFile(Parser):
+    """
+    Windows .ico/.cur parser: a 6-byte directory header, `nb_items`
+    IconHeader entries, then the icon bitmaps in directory order.
+    """
+    endian = LITTLE_ENDIAN
+    PARSER_TAGS = {
+        "id": "ico",
+        "category": "image",
+        "file_ext": ("ico", "cur"),
+        "mime": (u"image/x-ico",),
+        "min_size": (22 + 40)*8,
+#        "magic": (
+#            ("\0\0\1\0", 0), # Icon
+#            ("\0\0\2\0", 0), # Cursor
+#        ),
+        "magic_regex": ((
+            # signature=0, type=(1|2), count in 1..20,
+            "\0\0[\1\2]\0[\x01-\x14]."
+            # size=(16x16|32x32|48x48|64x64),
+            "(\x10\x10|\x20\x20|\x30\x30|\x40\x40)"
+            # nb_color=0 or 16; nb_plane=(0|1|4), bpp=(0|8|24|32)
+            "[\x00\x10]\0[\0\1\4][\0\x08\x18\x20]\0",
+        0),),
+        "description": "Microsoft Windows icon or cursor",
+    }
+    TYPE_NAME = {
+        1: "icon",
+        2: "cursor"
+    }
+
+    def validate(self):
+        # Check signature and type
+        if self["signature"].value != 0:
+            return "Wrong file signature"
+        if self["type"].value not in self.TYPE_NAME:
+            return "Unknown picture type"
+
+        # Check all icon headers
+        index = -1
+        for field in self:
+            if field.name.startswith("icon_header"):
+                index += 1
+                if not field.isValid():
+                    return "Invalid header #%u" % index
+            elif 0 <= index:
+                # Past the last header: stop iterating (avoids parsing
+                # the bitmap data just to validate).
+                break
+        return True
+
+    def createFields(self):
+        yield UInt16(self, "signature", "Signature (0x0000)")
+        yield Enum(UInt16(self, "type", "Resource type"), self.TYPE_NAME)
+        yield UInt16(self, "nb_items", "Number of items")
+        items = []
+        for index in xrange(self["nb_items"].value):
+            item = IconHeader(self, "icon_header[]")
+            yield item
+            items.append(item)
+        for header in items:
+            # Data blocks must be contiguous: each directory offset has
+            # to match the current parse position.
+            if header["offset"].value*8 != self.current_size:
+                raise ParserError("Icon: Problem with icon data offset.")
+            yield IconData(self, "icon_data[]", header)
+
+    def createDescription(self):
+        desc = "Microsoft Windows %s" % self["type"].display
+        size = []
+        for header in self.array("icon_header"):
+            size.append("%ux%ux%u" % (header["width"].value,
+                header["height"].value, header["bpp"].value))
+        if size:
+            return "%s: %s" % (desc, ", ".join(size))
+        else:
+            return desc
+
+    def createContentSize(self):
+        # File ends where the last icon's data ends.
+        count = self["nb_items"].value
+        if not count:
+            return None
+        field = self["icon_data[%u]" % (count-1)]
+        return field.absolute_address + field.size
+
diff --git a/lib/hachoir_parser/image/iptc.py b/lib/hachoir_parser/image/iptc.py
new file mode 100644
index 0000000000000000000000000000000000000000..6727de7f4504fda0386a889835a65da89daa1bff
--- /dev/null
+++ b/lib/hachoir_parser/image/iptc.py
@@ -0,0 +1,113 @@
+"""
+IPTC metadata parser (can be found in a JPEG picture for example)
+
+Sources:
+- Image-MetaData Perl module:
+  http://www.annocpan.org/~BETTELLI/Image-MetaData-JPEG-0.15/...
+  ...lib/Image/MetaData/JPEG/TagLists.pod
+- IPTC tag name and description:
+  http://peccatte.karefil.com/software/IPTCTableau.pdf
+
+Author: Victor Stinner
+"""
+
+from hachoir_core.field import (FieldSet, ParserError,
+    UInt8, UInt16, String, RawBytes, NullBytes)
+from hachoir_core.text_handler import textHandler, hexadecimal
+
+def IPTC_String(parent, name, desc=None):
+    # Field factory: a String sized by the sibling "size" field of the
+    # enclosing IPTC_Chunk.
+    # Charset may be utf-8, ISO-8859-1, or ...
+    return String(parent, name, parent["size"].value, desc,
+        strip=" ")
+
+# IPTC dataset tables: tag -> (field name, description, field class).
+# A class of None makes IPTC_Chunk fall back to RawBytes.
+dataset1 = {
+}
+dataset2 = {
+      0: ("record_version", "Record version (2 for JPEG)", UInt16),
+      5: ("obj_name", "Object name", None),
+      7: ("edit_stat", "Edit status", None),
+     10: ("urgency", "Urgency", UInt8),
+     15: ("category[]", "Category", None),
+     22: ("fixture", "Fixture identifier", IPTC_String),
+     25: ("keyword[]", "Keywords", IPTC_String),
+     30: ("release_date", "Release date", IPTC_String),
+     35: ("release_time", "Release time", IPTC_String),
+     40: ("instruction", "Special instructions", IPTC_String),
+     55: ("date_created", "Date created", IPTC_String),
+     60: ("time_created", "Time created (ISO 8601)", IPTC_String),
+     65: ("originating_prog", "Originating program", IPTC_String),
+     70: ("prog_ver", "Program version", IPTC_String),
+     80: ("author", "By-line (Author)", IPTC_String),
+     85: ("author_job", "By-line (Author precision)", IPTC_String),
+     90: ("city", "City", IPTC_String),
+     95: ("state", "Province / State", IPTC_String),
+    100: ("country_code", "Country / Primary location code", IPTC_String),
+    101: ("country_name", "Country / Primary location name", IPTC_String),
+    103: ("trans_ref", "Original transmission reference", IPTC_String),
+    105: ("headline", "Headline", IPTC_String),
+    110: ("credit", "Credit", IPTC_String),
+    115: ("source", "Source", IPTC_String),
+    116: ("copyright", "Copyright notice", IPTC_String),
+    120: ("caption", "Caption/Abstract", IPTC_String),
+    122: ("writer", "Writer/editor", IPTC_String),
+    231: ("history[]", "Document history (timestamp)", IPTC_String)
+}
+# Record number -> dataset table
+datasets = {1: dataset1, 2: dataset2}
+
+class IPTC_Size(FieldSet):
+    """
+    Variable-length IPTC size: 16-bit words are read while the high bit
+    is set; each word contributes its low 15 bits to the value.
+    """
+    def __init__(self, *args, **kw):
+        FieldSet.__init__(self, *args, **kw)
+        # Fold the already-parsed words into the final integer and
+        # install it as this field's value.
+        value = 0
+        for field in self:
+            value <<= 15
+            value  += (field.value & 0x7fff)
+        self.createValue = lambda: value
+
+    def createFields(self):
+        while True:
+            field = UInt16(self, "value[]")
+            yield field
+            # High bit clear (< 0x8000) marks the last word.
+            if field.value < 0x8000:
+                break
+
+class IPTC_Chunk(FieldSet):
+    """
+    One IPTC record: 0x1C signature, dataset number, tag, size, content.
+    Known (dataset, tag) pairs rename the chunk and select a typed
+    content field via the `datasets` tables.
+    """
+    def __init__(self, *args, **kw):
+        FieldSet.__init__(self, *args, **kw)
+        number = self["dataset_nb"].value
+        self.dataset_info = None
+        if number in datasets:
+            tag = self["tag"].value
+            if tag in datasets[number]:
+                self.dataset_info = datasets[number][tag]
+                self._name = self.dataset_info[0]
+                self._description = self.dataset_info[1]
+        # Total chunk size: 3 one-byte header fields + the (variable
+        # length) size field + the content itself.
+        size_chunk = self["size"]
+        self._size = 3*8 + size_chunk.size + size_chunk.value*8
+
+    def createFields(self):
+        yield textHandler(UInt8(self, "signature", "IPTC signature (0x1c)"), hexadecimal)
+        if self["signature"].value != 0x1C:
+            raise ParserError("Wrong IPTC signature")
+        yield textHandler(UInt8(self, "dataset_nb", "Dataset number"), hexadecimal)
+        yield UInt8(self, "tag", "Tag")
+        yield IPTC_Size(self, "size", "Content size")
+
+        size = self["size"].value
+        if 0 < size:
+            if self.dataset_info:
+                cls = self.dataset_info[2]
+            else:
+                cls = None
+            if cls:
+                yield cls(self, "content")
+            else:
+                # Unknown tag or no dedicated type: keep raw bytes.
+                yield RawBytes(self, "content", size)
+
+class IPTC(FieldSet):
+    """
+    Sequence of IPTC chunks followed by optional padding.  A chunk needs
+    at least 5 bytes (signature + dataset + tag + 2-byte size).
+    """
+    def createFields(self):
+        while 5 <= (self._size - self.current_size)/8:
+            yield IPTC_Chunk(self, "chunk[]")
+        size = (self._size - self.current_size) / 8
+        if 0 < size:
+            yield NullBytes(self, "padding", size)
+
diff --git a/lib/hachoir_parser/image/jpeg.py b/lib/hachoir_parser/image/jpeg.py
new file mode 100644
index 0000000000000000000000000000000000000000..30944aae724ca276475ea47a23a535b4ba9e1b27
--- /dev/null
+++ b/lib/hachoir_parser/image/jpeg.py
@@ -0,0 +1,368 @@
+"""
+JPEG picture parser.
+
+Information:
+
+- APP14 documents
+  http://partners.adobe.com/public/developer/en/ps/sdk/5116.DCT_Filter.pdf
+  http://java.sun.com/j2se/1.5.0/docs/api/javax/imageio/metadata/doc-files/jpeg_metadata.html#color
+- APP12:
+  http://search.cpan.org/~exiftool/Image-ExifTool/lib/Image/ExifTool/TagNames.pod
+
+Author: Victor Stinner
+"""
+
+from hachoir_core.error import HachoirError
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, ParserError,
+    UInt8, UInt16, Enum,
+    Bit, Bits, NullBits, NullBytes,
+    String, RawBytes)
+from hachoir_parser.image.common import PaletteRGB
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal
+from hachoir_parser.image.exif import Exif
+from hachoir_parser.image.photoshop_metadata import PhotoshopMetadata
+
+MAX_FILESIZE = 100 * 1024 * 1024
+
+# The four tables (hash/sum for color/grayscale JPEG) comes
+# from ImageMagick project
+QUALITY_HASH_COLOR = (
+    1020, 1015,  932,  848,  780,  735,  702,  679,  660,  645,
+     632,  623,  613,  607,  600,  594,  589,  585,  581,  571,
+     555,  542,  529,  514,  494,  474,  457,  439,  424,  410,
+     397,  386,  373,  364,  351,  341,  334,  324,  317,  309,
+     299,  294,  287,  279,  274,  267,  262,  257,  251,  247,
+     243,  237,  232,  227,  222,  217,  213,  207,  202,  198,
+     192,  188,  183,  177,  173,  168,  163,  157,  153,  148,
+     143,  139,  132,  128,  125,  119,  115,  108,  104,   99,
+      94,   90,   84,   79,   74,   70,   64,   59,   55,   49,
+      45,   40,   34,   30,   25,   20,   15,   11,    6,    4,
+       0)
+
+QUALITY_SUM_COLOR = (
+    32640,32635,32266,31495,30665,29804,29146,28599,28104,27670,
+    27225,26725,26210,25716,25240,24789,24373,23946,23572,22846,
+    21801,20842,19949,19121,18386,17651,16998,16349,15800,15247,
+    14783,14321,13859,13535,13081,12702,12423,12056,11779,11513,
+    11135,10955,10676,10392,10208, 9928, 9747, 9564, 9369, 9193,
+     9017, 8822, 8639, 8458, 8270, 8084, 7896, 7710, 7527, 7347,
+     7156, 6977, 6788, 6607, 6422, 6236, 6054, 5867, 5684, 5495,
+     5305, 5128, 4945, 4751, 4638, 4442, 4248, 4065, 3888, 3698,
+     3509, 3326, 3139, 2957, 2775, 2586, 2405, 2216, 2037, 1846,
+     1666, 1483, 1297, 1109,  927,  735,  554,  375,  201,  128,
+        0)
+
+QUALITY_HASH_GRAY = (
+    510,  505,  422,  380,  355,  338,  326,  318,  311,  305,
+    300,  297,  293,  291,  288,  286,  284,  283,  281,  280,
+    279,  278,  277,  273,  262,  251,  243,  233,  225,  218,
+    211,  205,  198,  193,  186,  181,  177,  172,  168,  164,
+    158,  156,  152,  148,  145,  142,  139,  136,  133,  131,
+    129,  126,  123,  120,  118,  115,  113,  110,  107,  105,
+    102,  100,   97,   94,   92,   89,   87,   83,   81,   79,
+     76,   74,   70,   68,   66,   63,   61,   57,   55,   52,
+     50,   48,   44,   42,   39,   37,   34,   31,   29,   26,
+     24,   21,   18,   16,   13,   11,    8,    6,    3,    2,
+      0)
+
+QUALITY_SUM_GRAY = (
+    16320,16315,15946,15277,14655,14073,13623,13230,12859,12560,
+    12240,11861,11456,11081,10714,10360,10027, 9679, 9368, 9056,
+     8680, 8331, 7995, 7668, 7376, 7084, 6823, 6562, 6345, 6125,
+     5939, 5756, 5571, 5421, 5240, 5086, 4976, 4829, 4719, 4616,
+     4463, 4393, 4280, 4166, 4092, 3980, 3909, 3835, 3755, 3688,
+     3621, 3541, 3467, 3396, 3323, 3247, 3170, 3096, 3021, 2952,
+     2874, 2804, 2727, 2657, 2583, 2509, 2437, 2362, 2290, 2211,
+     2136, 2068, 1996, 1915, 1858, 1773, 1692, 1620, 1552, 1477,
+     1398, 1326, 1251, 1179, 1109, 1031,  961,  884,  814,  736,
+      667,  592,  518,  441,  369,  292,  221,  151,   86,   64,
+        0)
+
+JPEG_NATURAL_ORDER = (
+     0,  1,  8, 16,  9,  2,  3, 10,
+    17, 24, 32, 25, 18, 11,  4,  5,
+    12, 19, 26, 33, 40, 48, 41, 34,
+    27, 20, 13,  6,  7, 14, 21, 28,
+    35, 42, 49, 56, 57, 50, 43, 36,
+    29, 22, 15, 23, 30, 37, 44, 51,
+    58, 59, 52, 45, 38, 31, 39, 46,
+    53, 60, 61, 54, 47, 55, 62, 63)
+
+class JpegChunkApp0(FieldSet):
+    """
+    JFIF APP0 segment: signature, version, density/aspect info and an
+    optional uncompressed thumbnail.
+    """
+    UNIT_NAME = {
+        0: "pixels",
+        1: "dots per inch",
+        2: "dots per cm",
+    }
+
+    def createFields(self):
+        yield String(self, "jfif", 5, "JFIF string", charset="ASCII")
+        if self["jfif"].value != "JFIF\0":
+            raise ParserError(
+                "Stream doesn't look like JPEG chunk (wrong JFIF signature)")
+        yield UInt8(self, "ver_maj", "Major version")
+        yield UInt8(self, "ver_min", "Minor version")
+        yield Enum(UInt8(self, "units", "Units"), self.UNIT_NAME)
+        # With unit 0 the two words are an aspect ratio, otherwise a
+        # physical pixel density.
+        if self["units"].value == 0:
+            yield UInt16(self, "aspect_x", "Aspect ratio (X)")
+            yield UInt16(self, "aspect_y", "Aspect ratio (Y)")
+        else:
+            yield UInt16(self, "x_density", "X density")
+            yield UInt16(self, "y_density", "Y density")
+        yield UInt8(self, "thumb_w", "Thumbnail width")
+        yield UInt8(self, "thumb_h", "Thumbnail height")
+        thumb_size = self["thumb_w"].value * self["thumb_h"].value
+        if thumb_size != 0:
+            # NOTE(review): assumes a 256-entry palette + 1 byte/pixel
+            # thumbnail -- confirm against the JFIF spec for RGB thumbs.
+            yield PaletteRGB(self, "thumb_palette", 256)
+            yield RawBytes(self, "thumb_data", thumb_size, "Thumbnail data")
+
+class Ducky(FieldSet):
+    """
+    One record of an APP12 "Ducky" segment: type, size, payload.
+    Type 0 marks the end and carries no size/data.
+    """
+    BLOCK_TYPE = {
+        0: "end",
+        1: "Quality",
+        2: "Comment",
+        3: "Copyright",
+    }
+    def createFields(self):
+        yield Enum(UInt16(self, "type"), self.BLOCK_TYPE)
+        if self["type"].value == 0:
+            return
+        yield UInt16(self, "size")
+        size = self["size"].value
+        if size:
+            yield RawBytes(self, "data", size)
+
+class APP12(FieldSet):
+    """
+    The JPEG APP12 "Picture Info" segment was used by some older cameras, and
+    contains ASCII-based meta information.
+    """
+    def createFields(self):
+        yield String(self, "ducky", 5, '"Ducky" string', charset="ASCII")
+        # Records until the declared segment size is exhausted.
+        while not self.eof:
+            yield Ducky(self, "item[]")
+
+class StartOfFrame(FieldSet):
+    """
+    SOFn segment: sample precision, image dimensions and the component
+    list.
+    """
+    def createFields(self):
+        yield UInt8(self, "precision")
+
+        yield UInt16(self, "height")
+        yield UInt16(self, "width")
+        yield UInt8(self, "nr_components")
+
+        # NOTE(review): per ITU-T T.81 each component is (id, packed H/V
+        # sampling factors, quantization table id); the "high"/"low"
+        # field names here look misleading -- confirm before relying on
+        # them.
+        for index in range(self["nr_components"].value):
+            yield UInt8(self, "component_id[]")
+            yield UInt8(self, "high[]")
+            yield UInt8(self, "low[]")
+
+class Comment(FieldSet):
+    # COM segment: the whole payload is one NUL-stripped text string.
+    def createFields(self):
+        yield String(self, "comment", self.size//8, strip="\0")
+
+class AdobeChunk(FieldSet):
+    """
+    APP14 Adobe segment: DCT encoder version, flags and the colorspace
+    transformation code.  Non-Adobe APP14 payloads are kept raw.
+    """
+    COLORSPACE_TRANSFORMATION = {
+        1: "YCbCr (converted from RGB)",
+        2: "YCCK (converted from CMYK)",
+    }
+    def createFields(self):
+        # Peek at the signature without consuming; bail out to raw bytes
+        # when it is not an Adobe segment.
+        if self.stream.readBytes(self.absolute_address, 5) != "Adobe":
+            yield RawBytes(self, "raw", self.size//8, "Raw data")
+            return
+        yield String(self, "adobe", 5, "\"Adobe\" string", charset="ASCII")
+        yield UInt16(self, "version", "DCT encoder version")
+        yield Enum(Bit(self, "flag00"),
+            {False: "Chop down or subsampling", True: "Blend"})
+        yield NullBits(self, "flags0_reserved", 15)
+        yield NullBytes(self, "flags1", 2)
+        yield Enum(UInt8(self, "color_transform", "Colorspace transformation code"), self.COLORSPACE_TRANSFORMATION)
+
+class StartOfScan(FieldSet):
+    """
+    SOS segment: component selectors followed by three bytes of scan
+    parameters.
+    """
+    def createFields(self):
+        yield UInt8(self, "nr_components")
+
+        for index in range(self["nr_components"].value):
+            comp_id = UInt8(self, "component_id[]")
+            yield comp_id
+            if not(1 <= comp_id.value <= self["nr_components"].value):
+               raise ParserError("JPEG error: Invalid component-id")
+            yield UInt8(self, "value[]")
+        # NOTE(review): per ITU-T T.81 these 3 bytes are Ss, Se and
+        # Ah/Al (spectral selection / successive approximation) -- TODO
+        # confirm and parse them as typed fields.
+        yield RawBytes(self, "raw", 3) # TODO: What's this???
+
+class RestartInterval(FieldSet):
+    # DRI segment: restart interval in MCUs (0 disables restart markers).
+    def createFields(self):
+        yield UInt16(self, "interval", "Restart interval")
+
+class QuantizationTable(FieldSet):
+    """
+    One quantization table: precision/index nibble, then 64 coefficients
+    stored in zigzag order but labelled by their natural position.
+    """
+    def createFields(self):
+        # Code based on function get_dqt() (jdmarker.c from libjpeg62)
+        yield Bits(self, "is_16bit", 4)
+        yield Bits(self, "index", 4)
+        if self["index"].value >= 4:
+            raise ParserError("Invalid quantification index (%s)" % self["index"].value)
+        if self["is_16bit"].value:
+            coeff_type = UInt16
+        else:
+            coeff_type = UInt8
+        for index in xrange(64):
+            # Map the stored zigzag position to the natural (row-major)
+            # coefficient index for the field name.
+            natural = JPEG_NATURAL_ORDER[index]
+            yield coeff_type(self, "coeff[%u]" % natural)
+
+    def createDescription(self):
+        return "Quantification table #%u" % self["index"].value
+
+class DefineQuantizationTable(FieldSet):
+    # DQT segment: one or more quantization tables back to back.
+    def createFields(self):
+        while self.current_size < self.size:
+            yield QuantizationTable(self, "qt[]")
+
+class JpegChunk(FieldSet):
+    """
+    Generic JPEG marker segment: 0xFF, marker byte, 16-bit size (except
+    for SOI/EOI), content.  TAG_INFO maps marker bytes to a field name,
+    description and optional content parser; the constructor renames
+    this field set accordingly.
+    """
+    TAG_SOI = 0xD8
+    TAG_EOI = 0xD9
+    TAG_SOS = 0xDA
+    TAG_DQT = 0xDB
+    TAG_DRI = 0xDD
+    TAG_INFO = {
+        0xC4: ("huffman[]", "Define Huffman Table (DHT)", None),
+        0xD8: ("start_image", "Start of image (SOI)", None),
+        0xD9: ("end_image", "End of image (EOI)", None),
+        0xDA: ("start_scan", "Start Of Scan (SOS)", StartOfScan),
+        0xDB: ("quantization[]", "Define Quantization Table (DQT)", DefineQuantizationTable),
+        0xDC: ("nb_line", "Define number of Lines (DNL)", None),
+        0xDD: ("restart_interval", "Define Restart Interval (DRI)", RestartInterval),
+        0xE0: ("app0", "APP0", JpegChunkApp0),
+        0xE1: ("exif", "Exif metadata", Exif),
+        0xE2: ("icc", "ICC profile", None),
+        0xEC: ("app12", "APP12", APP12),
+        0xED: ("photoshop", "Photoshop", PhotoshopMetadata),
+        0xEE: ("adobe", "Image encoding information for DCT filters (Adobe)", AdobeChunk),
+        0xFE: ("comment[]", "Comment", Comment),
+    }
+    START_OF_FRAME = {
+        0xC0: u"Baseline",
+        0xC1: u"Extended sequential",
+        0xC2: u"Progressive",
+        0xC3: u"Lossless",
+        0xC5: u"Differential sequential",
+        0xC6: u"Differential progressive",
+        0xC7: u"Differential lossless",
+        0xC9: u"Extended sequential, arithmetic coding",
+        0xCA: u"Progressive, arithmetic coding",
+        0xCB: u"Lossless, arithmetic coding",
+        0xCD: u"Differential sequential, arithmetic coding",
+        0xCE: u"Differential progressive, arithmetic coding",
+        0xCF: u"Differential lossless, arithmetic coding",
+    }
+    # Register every SOFn marker under a single "start_frame" name.
+    for key, text in START_OF_FRAME.iteritems():
+        TAG_INFO[key] = ("start_frame", "Start of frame (%s)" % text.lower(), StartOfFrame)
+
+    def __init__(self, parent, name, description=None):
+        FieldSet.__init__(self, parent, name, description)
+        tag = self["type"].value
+        if tag == 0xE1:
+            # Hack for Adobe extension: XAP metadata (as XML)
+            # NOTE(review): despite the comment above, this peeks 32 bits
+            # past the chunk start (header+type+size) and detects an
+            # "Exif\0\0" payload; non-Exif APP1 (e.g. XMP) stays raw.
+            bytes = self.stream.readBytes(self.absolute_address + 32, 6)
+            if bytes == "Exif\0\0":
+                self._name = "exif"
+                self._description = "EXIF"
+                self._parser = Exif
+            else:
+                self._parser = None
+        elif tag in self.TAG_INFO:
+            self._name, self._description, self._parser = self.TAG_INFO[tag]
+        else:
+            self._parser = None
+
+    def createFields(self):
+        yield textHandler(UInt8(self, "header", "Header"), hexadecimal)
+        if self["header"].value != 0xFF:
+            raise ParserError("JPEG: Invalid chunk header!")
+        yield textHandler(UInt8(self, "type", "Type"), hexadecimal)
+        tag = self["type"].value
+        # SOI and EOI are bare markers without a size/content.
+        if tag in (self.TAG_SOI, self.TAG_EOI):
+            return
+        yield UInt16(self, "size", "Size")
+        # Declared size includes the 2 size bytes themselves.
+        size = (self["size"].value - 2)
+        if 0 < size:
+            if self._parser:
+                yield self._parser(self, "content", "Chunk content", size=size*8)
+            else:
+                yield RawBytes(self, "data", size, "Data")
+
+    def createDescription(self):
+        return "Chunk: %s" % self["type"].display
+
+class JpegFile(Parser):
+    endian = BIG_ENDIAN
+    PARSER_TAGS = {
+        "id": "jpeg",
+        "category": "image",
+        "file_ext": ("jpg", "jpeg"),
+        "mime": (u"image/jpeg",),
+        "magic": (
+            ("\xFF\xD8\xFF\xE0", 0),   # (Start Of Image, APP0)
+            ("\xFF\xD8\xFF\xE1", 0),   # (Start Of Image, EXIF)
+            ("\xFF\xD8\xFF\xEE", 0),   # (Start Of Image, Adobe)
+        ),
+        "min_size": 22*8,
+        "description": "JPEG picture",
+        "subfile": "skip",
+    }
+
+    def validate(self):
+        if self.stream.readBytes(0, 2) != "\xFF\xD8":
+            return "Invalid file signature"
+        try:
+            for index, field in enumerate(self):
+                chunk_type = field["type"].value
+                if chunk_type not in JpegChunk.TAG_INFO:
+                    return "Unknown chunk type: 0x%02X (chunk #%s)" % (chunk_type, index)
+                if index == 2:
+                    # Only check 3 fields
+                    break
+        except HachoirError:
+            return "Unable to parse at least three chunks"
+        return True
+
+    def createFields(self):
+        while not self.eof:
+            chunk = JpegChunk(self, "chunk[]")
+            yield chunk
+            if chunk["type"].value == JpegChunk.TAG_SOS:
+                # TODO: Read JPEG image data...
+                break
+
+        # TODO: is it possible to handle piped input?
+        if self._size is None:
+            raise NotImplementedError
+
+        has_end = False
+        size = (self._size - self.current_size) // 8
+        if size:
+            if 2 < size \
+            and self.stream.readBytes(self._size - 16, 2) == "\xff\xd9":
+                has_end = True
+                size -= 2
+            yield RawBytes(self, "data", size, "JPEG data")
+        if has_end:
+            yield JpegChunk(self, "chunk[]")
+
+    def createDescription(self):
+        desc = "JPEG picture"
+        if "sof/content" in self:
+            header = self["sof/content"]
+            desc += ": %ux%u pixels" % (header["width"].value, header["height"].value)
+        return desc
+
+    def createContentSize(self):
+        if "end" in self:
+            return self["end"].absolute_address + self["end"].size
+        if "data" not in self:
+            return None
+        start = self["data"].absolute_address
+        end = self.stream.searchBytes("\xff\xd9", start, MAX_FILESIZE*8)
+        if end is not None:
+            return end + 16
+        return None
+
diff --git a/lib/hachoir_parser/image/pcx.py b/lib/hachoir_parser/image/pcx.py
new file mode 100644
index 0000000000000000000000000000000000000000..cb2a63bf0c022050c1a5216b781b5ff9d0a9d1dc
--- /dev/null
+++ b/lib/hachoir_parser/image/pcx.py
@@ -0,0 +1,73 @@
+"""
+PCX picture filter.
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (
+    UInt8, UInt16,
+    PaddingBytes, RawBytes,
+    Enum)
+from hachoir_parser.image.common import PaletteRGB
+from hachoir_core.endian import LITTLE_ENDIAN
+
+class PcxFile(Parser):
+    endian = LITTLE_ENDIAN
+    PARSER_TAGS = {
+        "id": "pcx",
+        "category": "image",
+        "file_ext": ("pcx",),
+        "mime": (u"image/x-pcx",),
+        "min_size": 128*8,
+        "description": "PC Paintbrush (PCX) picture"
+    }
+    compression_name = { 1: "Run-length encoding (RLE)" }
+    version_name = {
+        0: u"Version 2.5 of PC Paintbrush",
+        2: u"Version 2.8 with palette information",
+        3: u"Version 2.8 without palette information",
+        4: u"PC Paintbrush for Windows",
+        5: u"Version 3.0 (or greater) of PC Paintbrush"
+    }
+
+    def validate(self):
+        if self["id"].value != 10:
+            return "Wrong signature"
+        if self["version"].value not in self.version_name:
+            return "Unknown format version"
+        if self["bpp"].value not in (1, 2, 4, 8, 24, 32):
+            return "Unknown bits/pixel"
+        if self["reserved[0]"].value != "\0":
+            return "Invalid reserved value"
+        return True
+
+    def createFields(self):
+        yield UInt8(self, "id", "PCX identifier (10)")
+        yield Enum(UInt8(self, "version", "PCX version"), self.version_name)
+        yield Enum(UInt8(self, "compression", "Compression method"), self.compression_name)
+        yield UInt8(self, "bpp", "Bits / pixel")
+        yield UInt16(self, "xmin", "Minimum X")
+        yield UInt16(self, "ymin", "Minimum Y")
+        yield UInt16(self, "xmax", "Width minus one") # value + 1
+        yield UInt16(self, "ymax", "Height minus one") # value + 1
+        yield UInt16(self, "horiz_dpi", "Horizontal DPI")
+        yield UInt16(self, "vert_dpi", "Vertical DPI")
+        yield PaletteRGB(self, "palette_4bits", 16, "Palette (4 bits)")
+        yield PaddingBytes(self, "reserved[]", 1)
+        yield UInt8(self, "nb_color_plan", "Number of color plans")
+        yield UInt16(self, "bytes_per_line", "Bytes per line")
+        yield UInt16(self, "color_mode", "Color mode")
+        yield PaddingBytes(self, "reserved[]", 58)
+
+        if self._size is None: # TODO: is it possible to handle piped input?
+            raise NotImplementedError
+
+        nb_colors = 256
+        size = (self._size - self.current_size)/8
+        has_palette = self["bpp"].value == 8
+        if has_palette:
+            size -= nb_colors*3
+        yield RawBytes(self, "image_data", size, "Image data")
+
+        if has_palette:
+            yield PaletteRGB(self, "palette_8bits", nb_colors, "Palette (8 bit)")
+
diff --git a/lib/hachoir_parser/image/photoshop_metadata.py b/lib/hachoir_parser/image/photoshop_metadata.py
new file mode 100644
index 0000000000000000000000000000000000000000..be660cec0c76fcecbd950bd85cb959400c9093f9
--- /dev/null
+++ b/lib/hachoir_parser/image/photoshop_metadata.py
@@ -0,0 +1,83 @@
+from hachoir_core.field import (FieldSet, ParserError,
+    UInt8, UInt16, UInt32,
+    String, CString, PascalString8,
+    NullBytes, RawBytes)
+from hachoir_core.text_handler import textHandler, hexadecimal
+from hachoir_core.tools import alignValue, createDict
+from hachoir_parser.image.iptc import IPTC
+from hachoir_parser.common.win32 import PascalStringWin32
+
+class Version(FieldSet):
+    """Photoshop "version info" resource (8BIM tag 0x0421)."""
+    def createFields(self):
+        yield UInt32(self, "version")
+        yield UInt8(self, "has_realm")
+        yield PascalStringWin32(self, "writer_name", charset="UTF-16-BE")
+        yield PascalStringWin32(self, "reader_name", charset="UTF-16-BE")
+        yield UInt32(self, "file_version")
+        # Consume any remaining bytes of the resource as null padding.
+        size = (self.size - self.current_size) // 8
+        if size:
+            yield NullBytes(self, "padding", size)
+
+class Photoshop8BIM(FieldSet):
+    """One Photoshop image resource block ("8BIM" item).
+
+    Layout: 4-byte "8BIM" signature, 16-bit tag, a name (Pascal string or
+    four null bytes), 16-bit content size, then the content padded to an
+    even byte count.
+    """
+    # tag id -> (field name, content parser class or None, description)
+    TAG_INFO = {
+        0x03ed: ("res_info", None, "Resolution information"),
+        0x03f3: ("print_flag", None, "Print flags: labels, crop marks, colour bars, etc."),
+        0x03f5: ("col_half_info", None, "Colour half-toning information"),
+        0x03f8: ("color_trans_func", None, "Colour transfer function"),
+        0x0404: ("iptc", IPTC, "IPTC/NAA"),
+        0x0406: ("jpeg_qual", None, "JPEG quality"),
+        0x0408: ("grid_guide", None, "Grid guides informations"),
+        0x040a: ("copyright_flag", None, "Copyright flag"),
+        0x040c: ("thumb_res2", None, "Thumbnail resource (2)"),
+        0x040d: ("glob_angle", None, "Global lighting angle for effects"),
+        0x0411: ("icc_tagged", None, "ICC untagged (1 means intentionally untagged)"),
+        0x0414: ("base_layer_id", None, "Base value for new layers ID's"),
+        0x0419: ("glob_altitude", None, "Global altitude"),
+        0x041a: ("slices", None, "Slices"),
+        0x041e: ("url_list", None, "Unicode URL's"),
+        0x0421: ("version", Version, "Version information"),
+        0x2710: ("print_flag2", None, "Print flags (2)"),
+    }
+    TAG_NAME = createDict(TAG_INFO, 0)
+    CONTENT_HANDLER = createDict(TAG_INFO, 1)
+    TAG_DESC = createDict(TAG_INFO, 2)
+
+    def __init__(self, *args, **kw):
+        FieldSet.__init__(self, *args, **kw)
+        try:
+            # Known tags get a symbolic field name, an optional dedicated
+            # content parser, and a description.
+            self._name, self.handler, self._description = self.TAG_INFO[self["tag"].value]
+        except KeyError:
+            self.handler = None
+        size = self["size"]
+        # Total bit size: everything up to and including the "size" field,
+        # plus the content rounded up to a 2-byte boundary.
+        self._size = size.address + size.size + alignValue(size.value, 2) * 8
+
+    def createFields(self):
+        yield String(self, "signature", 4, "8BIM signature", charset="ASCII")
+        if self["signature"].value != "8BIM":
+            raise ParserError("Stream doesn't look like 8BIM item (wrong signature)!")
+        yield textHandler(UInt16(self, "tag"), hexadecimal)
+        # Peek the next 4 bytes: a non-null value means a Pascal name is
+        # present; four null bytes stand for an empty name.
+        if self.stream.readBytes(self.absolute_address + self.current_size, 4) != "\0\0\0\0":
+            yield PascalString8(self, "name")
+            # Null padding after the name; 2 bytes plus one when the name
+            # occupies an odd byte count (TODO confirm against spec).
+            size = 2 + (self["name"].size // 8) % 2
+            yield NullBytes(self, "name_padding", size)
+        else:
+            yield String(self, "name", 4, strip="\0")
+        yield UInt16(self, "size")
+        # Content is stored padded to an even number of bytes.
+        size = alignValue(self["size"].value, 2)
+        if not size:
+            return
+        if self.handler:
+            yield self.handler(self, "content", size=size*8)
+        else:
+            yield RawBytes(self, "content", size)
+
+class PhotoshopMetadata(FieldSet):
+    def createFields(self):
+        yield CString(self, "signature", "Photoshop version")
+        if self["signature"].value == "Photoshop 3.0":
+            while not self.eof:
+                yield Photoshop8BIM(self, "item[]")
+        else:
+            size = (self._size - self.current_size) / 8
+            yield RawBytes(self, "rawdata", size)
+
diff --git a/lib/hachoir_parser/image/png.py b/lib/hachoir_parser/image/png.py
new file mode 100644
index 0000000000000000000000000000000000000000..66f1688e5ca630a2c7f116a984d0629ed77cd543
--- /dev/null
+++ b/lib/hachoir_parser/image/png.py
@@ -0,0 +1,268 @@
+"""
+PNG picture file parser.
+
+Documents:
+- RFC 2083
+  http://www.faqs.org/rfcs/rfc2083.html
+
+Author: Victor Stinner
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, Fragment,
+    ParserError, MissingField,
+    UInt8, UInt16, UInt32,
+    String, CString,
+    Bytes, RawBytes,
+    Bit, NullBits,
+    Enum, CompressedField)
+from hachoir_parser.image.common import RGB
+from hachoir_core.text_handler import textHandler, hexadecimal
+from hachoir_core.endian import NETWORK_ENDIAN
+from hachoir_core.tools import humanFilesize
+from datetime import datetime
+
+MAX_FILESIZE = 500 * 1024 * 1024
+
+try:
+    from zlib import decompressobj
+
+    class Gunzip:
+        """Stateful zlib decompressor, used as a CompressedField filter."""
+        def __init__(self, stream):
+            self.gzip = decompressobj()
+
+        def __call__(self, size, data=None):
+            # When called without fresh input, keep decompressing the
+            # tail left unconsumed by the previous call.
+            if data is None:
+                data = self.gzip.unconsumed_tail
+            return self.gzip.decompress(data, size)
+
+    has_deflate = True
+except ImportError:
+    # zlib unavailable: IDAT data stays compressed.
+    has_deflate = False
+
+UNIT_NAME = {1: "Meter"}
+COMPRESSION_NAME = {
+    0: u"deflate" # with 32K sliding window
+}
+MAX_CHUNK_SIZE = 500 * 1024 # Maximum chunk size (500 KB)
+
+def headerParse(parent):
+    """Parse the content of an IHDR chunk."""
+    yield UInt32(parent, "width", "Width (pixels)")
+    yield UInt32(parent, "height", "Height (pixels)")
+    yield UInt8(parent, "bit_depth", "Bit depth")
+    # The color type byte, split into its individual flag bits.
+    yield NullBits(parent, "reserved", 5)
+    yield Bit(parent, "has_alpha", "Has alpha channel?")
+    yield Bit(parent, "color", "Color used?")
+    yield Bit(parent, "has_palette", "Has a color palette?")
+    yield Enum(UInt8(parent, "compression", "Compression method"), COMPRESSION_NAME)
+    yield UInt8(parent, "filter", "Filter method")
+    yield UInt8(parent, "interlace", "Interlace method")
+
+def headerDescription(parent):
+    """Human-readable summary of an IHDR chunk."""
+    return "Header: %ux%u pixels and %u bits/pixel" % \
+        (parent["width"].value, parent["height"].value, getBitsPerPixel(parent))
+
+def paletteParse(parent):
+    """Parse a PLTE chunk: a sequence of 3-byte RGB entries."""
+    size = parent["size"].value
+    if (size % 3) != 0:
+        raise ParserError("Palette have invalid size (%s), should be 3*n!" % size)
+    nb_colors = size // 3
+    for index in xrange(nb_colors):
+        yield RGB(parent, "color[]")
+
+def paletteDescription(parent):
+    """Summary of a PLTE chunk (number of colors)."""
+    return "Palette: %u colors" % (parent["size"].value // 3)
+
+def gammaParse(parent):
+    """Parse a gAMA chunk (gamma stored as an integer x100,000)."""
+    yield UInt32(parent, "gamma", "Gamma (x100,000)")
+def gammaValue(parent):
+    """Decode the stored integer back into the real gamma value."""
+    return float(parent["gamma"].value) / 100000
+def gammaDescription(parent):
+    """Summary of a gAMA chunk."""
+    return "Gamma: %.3f" % parent.value
+
+def textParse(parent):
+    """Parse a tEXt chunk: null-terminated keyword, then optional text."""
+    yield CString(parent, "keyword", "Keyword", charset="ISO-8859-1")
+    # Text length = chunk size minus the keyword (incl. its terminator).
+    length = parent["size"].value - parent["keyword"].size/8
+    if length:
+        yield String(parent, "text", length, "Text", charset="ISO-8859-1")
+
+def textDescription(parent):
+    """Summary of a tEXt chunk."""
+    if "text" in parent:
+        return u'Text: %s' % parent["text"].display
+    else:
+        return u'Text'
+
+def timestampParse(parent):
+    """Parse a tIME chunk (last modification time)."""
+    yield UInt16(parent, "year", "Year")
+    yield UInt8(parent, "month", "Month")
+    yield UInt8(parent, "day", "Day")
+    yield UInt8(parent, "hour", "Hour")
+    yield UInt8(parent, "minute", "Minute")
+    yield UInt8(parent, "second", "Second")
+
+def timestampValue(parent):
+    """Build a datetime object from the tIME chunk fields."""
+    value = datetime(
+        parent["year"].value, parent["month"].value, parent["day"].value,
+        parent["hour"].value, parent["minute"].value, parent["second"].value)
+    return value
+
+def physicalParse(parent):
+    """Parse a pHYs chunk (physical pixel dimensions)."""
+    yield UInt32(parent, "pixel_per_unit_x", "Pixel per unit, X axis")
+    yield UInt32(parent, "pixel_per_unit_y", "Pixel per unit, Y axis")
+    yield Enum(UInt8(parent, "unit", "Unit type"), UNIT_NAME)
+
+def physicalDescription(parent):
+    """Summary of a pHYs chunk."""
+    x = parent["pixel_per_unit_x"].value
+    y = parent["pixel_per_unit_y"].value
+    desc = "Physical: %ux%u pixels" % (x,y)
+    # Unit 1 is "Meter" (see UNIT_NAME).
+    if parent["unit"].value == 1:
+        desc += " per meter"
+    return desc
+
+def parseBackgroundColor(parent):
+    """Parse a bKGD chunk (16 bits per color component)."""
+    yield UInt16(parent, "red")
+    yield UInt16(parent, "green")
+    yield UInt16(parent, "blue")
+
+def backgroundColorDesc(parent):
+    """Summary of a bKGD chunk, using a symbolic color name when known."""
+    rgb = parent["red"].value, parent["green"].value, parent["blue"].value
+    name = RGB.color_name.get(rgb)
+    if not name:
+        # Unknown color: fall back to an HTML-style hex triplet.
+        name = "#%02X%02X%02X" % rgb
+    return "Background color: %s" % name
+
+
+class ImageData(Fragment):
+    """Content of one IDAT chunk, chained with its sibling IDAT chunks.
+
+    All IDAT chunks form a single deflate stream; Fragment links let
+    hachoir treat (and decompress) them as one unit.
+    """
+    def __init__(self, parent, name="compressed_data"):
+        Fragment.__init__(self, parent, name, None, 8*parent["size"].value)
+        # Derive sibling chunk paths from our parent's "data[i]" name.
+        data = parent.name.split('[')
+        data, next = "../%s[%%u]" % data[0], int(data[1][:-1]) + 1
+        first = parent.getField(data % 0)
+        if first is parent:
+            # This is the first IDAT chunk: attach the decompressor here.
+            first = None
+            if has_deflate:
+                CompressedField(self, Gunzip)
+        else:
+            first = first[name]
+        try:
+            # Link forward to the next IDAT fragment when it exists.
+            _next = parent[data % next]
+            next = lambda: _next[name]
+        except MissingField:
+            next = None
+        self.setLinks(first, next)
+
+def parseTransparency(parent):
+    """Parse a tRNS chunk: one alpha byte per palette entry."""
+    for i in range(parent["size"].value):
+        yield UInt8(parent, "alpha_value[]", "Alpha value for palette entry %i"%i)
+
+def getBitsPerPixel(header):
+    """Compute bits/pixel from IHDR fields: components x bit depth."""
+    nr_component = 1
+    if header["has_alpha"].value:
+        nr_component += 1
+    if header["color"].value and not header["has_palette"].value:
+        # True-color (non-paletted): red, green and blue samples.
+        nr_component += 2
+    return nr_component * header["bit_depth"].value
+
+class Chunk(FieldSet):
+    """One PNG chunk: 32-bit size, 4-char tag, content, 32-bit CRC."""
+    # tag -> (field name, content parser, description or callable,
+    #         optional value function)
+    TAG_INFO = {
+        "tIME": ("time", timestampParse, "Timestamp", timestampValue),
+        "pHYs": ("physical", physicalParse, physicalDescription, None),
+        "IHDR": ("header", headerParse, headerDescription, None),
+        "PLTE": ("palette", paletteParse, paletteDescription, None),
+        "gAMA": ("gamma", gammaParse, gammaDescription, gammaValue),
+        "tEXt": ("text[]", textParse, textDescription, None),
+        "tRNS": ("transparency", parseTransparency, "Transparency Info", None),
+
+        "bKGD": ("background", parseBackgroundColor, backgroundColorDesc, None),
+        "IDAT": ("data[]", lambda parent: (ImageData(parent),), "Image data", None),
+        "iTXt": ("utf8_text[]", None, "International text (encoded in UTF-8)", None),
+        "zTXt": ("comp_text[]", None, "Compressed text", None),
+        "IEND": ("end", None, "End", None)
+    }
+
+    def createValueFunc(self):
+        # Indirection so createValue can be bound per-tag in __init__.
+        return self.value_func(self)
+
+    def __init__(self, parent, name, description=None):
+        FieldSet.__init__(self, parent, name, description)
+        # Chunk size = payload + 3 fixed 4-byte fields (size, tag, CRC).
+        self._size = (self["size"].value + 3*4) * 8
+        if MAX_CHUNK_SIZE < (self._size//8):
+            raise ParserError("PNG: Chunk is too big (%s)"
+                % humanFilesize(self._size//8))
+        tag = self["tag"].value
+        self.desc_func = None
+        self.value_func = None
+        if tag in self.TAG_INFO:
+            self._name, self.parse_func, desc, value_func = self.TAG_INFO[tag]
+            if value_func:
+                self.value_func = value_func
+                self.createValue = self.createValueFunc
+            if desc:
+                # Description is either a static string or a callable.
+                if isinstance(desc, str):
+                    self._description = desc
+                else:
+                    self.desc_func = desc
+        else:
+            self._description = ""
+            self.parse_func = None
+
+    def createFields(self):
+        yield UInt32(self, "size", "Size")
+        yield String(self, "tag", 4, "Tag", charset="ASCII")
+
+        size = self["size"].value
+        if size != 0:
+            if self.parse_func:
+                for field in self.parse_func(self):
+                    yield field
+            else:
+                # Unknown chunk: keep the payload as raw bytes.
+                yield RawBytes(self, "content", size, "Data")
+        yield textHandler(UInt32(self, "crc32", "CRC32"), hexadecimal)
+
+    def createDescription(self):
+        if self.desc_func:
+            return self.desc_func(self)
+        else:
+            return "Chunk: %s" % self["tag"].display
+
+class PngFile(Parser):
+    """Portable Network Graphics (PNG) file parser."""
+    PARSER_TAGS = {
+        "id": "png",
+        "category": "image",
+        "file_ext": ("png",),
+        "mime": (u"image/png", u"image/x-png"),
+        "min_size": 8*8, # just the identifier
+        "magic": [('\x89PNG\r\n\x1A\n', 0)],
+        "description": "Portable Network Graphics (PNG) picture"
+    }
+    endian = NETWORK_ENDIAN
+
+    def validate(self):
+        """Check the PNG signature and that the first chunk is IHDR."""
+        if self["id"].value != '\x89PNG\r\n\x1A\n':
+            return "Invalid signature"
+        if self[1].name != "header":
+            return "First chunk is not header"
+        return True
+
+    def createFields(self):
+        yield Bytes(self, "id", 8, r"PNG identifier ('\x89PNG\r\n\x1A\n')")
+        while not self.eof:
+            yield Chunk(self, "chunk[]")
+
+    def createDescription(self):
+        header = self["header"]
+        desc = "PNG picture: %ux%ux%u" % (
+            header["width"].value, header["height"].value, getBitsPerPixel(header))
+        if header["has_alpha"].value:
+            desc += " (alpha layer)"
+        return desc
+
+    def createContentSize(self):
+        """Locate the IEND chunk to compute the content size (in bits)."""
+        field = self["header"]
+        start = field.absolute_address + field.size
+        end = MAX_FILESIZE * 8
+        # Search "size=0" + "IEND" + its fixed CRC; the match covers the
+        # whole 12-byte final chunk, hence the + 12*8 below.
+        pos = self.stream.searchBytes("\0\0\0\0IEND\xae\x42\x60\x82", start, end)
+        if pos is not None:
+            return pos + 12*8
+        return None
+
diff --git a/lib/hachoir_parser/image/psd.py b/lib/hachoir_parser/image/psd.py
new file mode 100644
index 0000000000000000000000000000000000000000..6ea09fb182252f34a88de2a25f31728cefc51dae
--- /dev/null
+++ b/lib/hachoir_parser/image/psd.py
@@ -0,0 +1,85 @@
+"""
+Photoshop parser (.psd file).
+
+Creation date: 8 january 2006
+Author: Victor Stinner
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    UInt16, UInt32, String, NullBytes, Enum, RawBytes)
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_parser.image.photoshop_metadata import Photoshop8BIM
+
+class Config(FieldSet):
+    """PSD image resources section: 32-bit size, then 8BIM items."""
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+        # Total size: the 4-byte length field plus the declared payload.
+        self._size = (4 + self["size"].value) * 8
+
+    def createFields(self):
+        yield UInt32(self, "size")
+        while not self.eof:
+            yield Photoshop8BIM(self, "item[]")
+
+class PsdFile(Parser):
+    """Photoshop (.psd) picture parser.
+
+    Layout: fixed header, mode data, image resources (8BIM items),
+    reserved data, compression flag, then raw channel data.
+    """
+    endian = BIG_ENDIAN
+    PARSER_TAGS = {
+        "id": "psd",
+        "category": "image",
+        "file_ext": ("psd",),
+        "mime": (u"image/psd", u"image/photoshop", u"image/x-photoshop"),
+        "min_size": 4*8,
+        "magic": (("8BPS\0\1",0),),
+        "description": "Photoshop (PSD) picture",
+    }
+    COLOR_MODE = {
+        0: u"Bitmap",
+        1: u"Grayscale",
+        2: u"Indexed",
+        3: u"RGB color",
+        4: u"CMYK color",
+        7: u"Multichannel",
+        8: u"Duotone",
+        9: u"Lab Color",
+    }
+    COMPRESSION_NAME = {
+        0: "Raw data",
+        1: "RLE",
+    }
+
+    def validate(self):
+        """Check the "8BPS" signature."""
+        if self.stream.readBytes(0, 4) != "8BPS":
+            return "Invalid signature"
+        return True
+
+    def createFields(self):
+        yield String(self, "signature", 4, "PSD signature (8BPS)", charset="ASCII")
+        yield UInt16(self, "version")
+        yield NullBytes(self, "reserved[]", 6)
+        yield UInt16(self, "nb_channels")
+        yield UInt32(self, "width")
+        yield UInt32(self, "height")
+        yield UInt16(self, "depth")
+        yield Enum(UInt16(self, "color_mode"), self.COLOR_MODE)
+
+        # Mode data
+        yield UInt32(self, "mode_data_size")
+        size = self["mode_data_size"].value
+        if size:
+            yield RawBytes(self, "mode_data", size)
+
+        # Resources
+        yield Config(self, "config")
+
+        # Reserved
+        yield UInt32(self, "reserved_data_size")
+        size = self["reserved_data_size"].value
+        if size:
+            yield RawBytes(self, "reserved_data", size)
+
+        yield Enum(UInt16(self, "compression"), self.COMPRESSION_NAME)
+
+        # Remaining bytes: channel image data (not parsed further).
+        size = (self.size - self.current_size) // 8
+        if size:
+            yield RawBytes(self, "end", size)
+
diff --git a/lib/hachoir_parser/image/tga.py b/lib/hachoir_parser/image/tga.py
new file mode 100644
index 0000000000000000000000000000000000000000..716ab28a509e9d3a2306a4c8cac6d09609117359
--- /dev/null
+++ b/lib/hachoir_parser/image/tga.py
@@ -0,0 +1,85 @@
+"""
+Truevision Targa Graphic (TGA) picture parser.
+
+Author: Victor Stinner
+Creation: 18 december 2006
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import FieldSet, UInt8, UInt16, Enum, RawBytes
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_parser.image.common import PaletteRGB
+
+class Line(FieldSet):
+    """One scan line of an uncompressed TGA image."""
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+        # Line size in bits: image width times bits per pixel.
+        self._size = self["/width"].value * self["/bpp"].value
+
+    def createFields(self):
+        # NOTE(review): pixels are read as single bytes; for bpp > 8 this
+        # yields fewer fields than the declared line size -- in practice
+        # this path is only reached for codec 1 (8-bit). Confirm.
+        for x in xrange(self["/width"].value):
+            yield UInt8(self, "pixel[]")
+
+class Pixels(FieldSet):
+    """All scan lines of an uncompressed TGA image."""
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+        self._size = self["/width"].value * self["/height"].value * self["/bpp"].value
+
+    def createFields(self):
+        # options == 0 means vertical mirror (see the "options" field
+        # description): walk the lines in reverse order.
+        if self["/options"].value == 0:
+            RANGE = xrange(self["/height"].value-1,-1,-1)
+        else:
+            RANGE = xrange(self["/height"].value)
+        for y in RANGE:
+            yield Line(self, "line[%u]" % y)
+
+class TargaFile(Parser):
+    """Truevision Targa Graphic (TGA) picture parser."""
+    PARSER_TAGS = {
+        "id": "targa",
+        "category": "image",
+        "file_ext": ("tga",),
+        "mime": (u"image/targa", u"image/tga", u"image/x-tga"),
+        "min_size": 18*8,
+        "description": u"Truevision Targa Graphic (TGA)"
+    }
+    CODEC_NAME = {
+         1: u"8-bit uncompressed",
+         2: u"24-bit uncompressed",
+         9: u"8-bit RLE",
+        10: u"24-bit RLE",
+    }
+    endian = LITTLE_ENDIAN
+
+    def validate(self):
+        """Check version, codec, origin and bit depth fields."""
+        if self["version"].value != 1:
+            return "Unknown version"
+        if self["codec"].value not in self.CODEC_NAME:
+            return "Unknown codec"
+        if self["x_min"].value != 0 or self["y_min"].value != 0:
+            return "(x_min, y_min) is not (0,0)"
+        if self["bpp"].value not in (8, 24):
+            return "Unknown bits/pixel value"
+        return True
+
+    def createFields(self):
+        yield UInt8(self, "hdr_size", "Header size in bytes")
+        yield UInt8(self, "version", "Targa version (always one)")
+        yield Enum(UInt8(self, "codec", "Pixels encoding"), self.CODEC_NAME)
+        yield UInt16(self, "palette_ofs", "Palette absolute file offset")
+        yield UInt16(self, "nb_color", "Number of color")
+        yield UInt8(self, "color_map_size", "Color map entry size")
+        yield UInt16(self, "x_min")
+        yield UInt16(self, "y_min")
+        yield UInt16(self, "width")
+        yield UInt16(self, "height")
+        yield UInt8(self, "bpp", "Bits per pixel")
+        yield UInt8(self, "options", "Options (0: vertical mirror)")
+        # 8-bit images carry a 256-entry RGB palette.
+        if self["bpp"].value == 8:
+            yield PaletteRGB(self, "palette", 256)
+        if self["codec"].value == 1:
+            # 8-bit uncompressed: parse pixels individually.
+            yield Pixels(self, "pixels")
+        else:
+            # Other codecs (24-bit and/or RLE): keep the data raw.
+            size = (self.size - self.current_size) // 8
+            if size:
+                yield RawBytes(self, "raw_pixels", size)
+
+
diff --git a/lib/hachoir_parser/image/tiff.py b/lib/hachoir_parser/image/tiff.py
new file mode 100644
index 0000000000000000000000000000000000000000..a096212f50ba279f1188f4cf449e4f46c913e936
--- /dev/null
+++ b/lib/hachoir_parser/image/tiff.py
@@ -0,0 +1,211 @@
+"""
+TIFF image parser.
+
+Authors: Victor Stinner and Sebastien Ponce
+Creation date: 30 september 2006
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, SeekableFieldSet, ParserError, RootSeekableFieldSet,
+    UInt16, UInt32, Bytes, String)
+from hachoir_core.endian import LITTLE_ENDIAN, BIG_ENDIAN
+from hachoir_parser.image.exif import BasicIFDEntry
+from hachoir_core.tools import createDict
+
+MAX_COUNT = 250
+
+class IFDEntry(BasicIFDEntry):
+    """One fixed-size (12-byte) TIFF IFD entry: tag, type, count and
+    inline value or offset."""
+    static_size = 12*8
+
+    # tag id -> (field name, description)
+    TAG_INFO = {
+        254: ("new_subfile_type", "New subfile type"),
+        255: ("subfile_type", "Subfile type"),
+        256: ("img_width", "Image width in pixels"),
+        257: ("img_height", "Image height in pixels"),
+        258: ("bits_per_sample", "Bits per sample"),
+        259: ("compression", "Compression method"),
+        262: ("photo_interpret", "Photometric interpretation"),
+        263: ("thres", "Thresholding"),
+        264: ("cell_width", "Cellule width"),
+        265: ("cell_height", "Cellule height"),
+        266: ("fill_order", "Fill order"),
+        269: ("doc_name", "Document name"),
+        270: ("description", "Image description"),
+        271: ("make", "Make"),
+        272: ("model", "Model"),
+        273: ("strip_ofs", "Strip offsets"),
+        274: ("orientation", "Orientation"),
+        277: ("sample_pixel", "Samples per pixel"),
+        278: ("row_per_strip", "Rows per strip"),
+        279: ("strip_byte", "Strip byte counts"),
+        280: ("min_sample_value", "Min sample value"),
+        281: ("max_sample_value", "Max sample value"),
+        282: ("xres", "X resolution"),
+        283: ("yres", "Y resolution"),
+        284: ("planar_conf", "Planar configuration"),
+        285: ("page_name", "Page name"),
+        286: ("xpos", "X position"),
+        287: ("ypos", "Y position"),
+        288: ("free_ofs", "Free offsets"),
+        289: ("free_byte", "Free byte counts"),
+        290: ("gray_resp_unit", "Gray response unit"),
+        291: ("gray_resp_curve", "Gray response curve"),
+        292: ("group3_opt", "Group 3 options"),
+        293: ("group4_opt", "Group 4 options"),
+        296: ("res_unit", "Resolution unit"),
+        297: ("page_nb", "Page number"),
+        301: ("color_respt_curve", "Color response curves"),
+        305: ("software", "Software"),
+        306: ("date_time", "Date time"),
+        315: ("artist", "Artist"),
+        316: ("host_computer", "Host computer"),
+        317: ("predicator", "Predicator"),
+        318: ("white_pt", "White point"),
+        319: ("prim_chomat", "Primary chromaticities"),
+        320: ("color_map", "Color map"),
+        321: ("half_tone_hints", "Halftone Hints"),
+        322: ("tile_width", "TileWidth"),
+        323: ("tile_length", "TileLength"),
+        324: ("tile_offsets", "TileOffsets"),
+        325: ("tile_byte_counts", "TileByteCounts"),
+        332: ("ink_set", "InkSet"),
+        333: ("ink_names", "InkNames"),
+        334: ("number_of_inks", "NumberOfInks"),
+        336: ("dot_range", "DotRange"),
+        337: ("target_printer", "TargetPrinter"),
+        338: ("extra_samples", "ExtraSamples"),
+        339: ("sample_format", "SampleFormat"),
+        340: ("smin_sample_value", "SMinSampleValue"),
+        341: ("smax_sample_value", "SMaxSampleValue"),
+        342: ("transfer_range", "TransferRange"),
+        512: ("jpeg_proc", "JPEGProc"),
+        513: ("jpeg_interchange_format", "JPEGInterchangeFormat"),
+        514: ("jpeg_interchange_format_length", "JPEGInterchangeFormatLength"),
+        515: ("jpeg_restart_interval", "JPEGRestartInterval"),
+        517: ("jpeg_lossless_predictors", "JPEGLosslessPredictors"),
+        518: ("jpeg_point_transforms", "JPEGPointTransforms"),
+        519: ("jpeg_qtables", "JPEGQTables"),
+        520: ("jpeg_dctables", "JPEGDCTables"),
+        521: ("jpeg_actables", "JPEGACTables"),
+        529: ("ycbcr_coefficients", "YCbCrCoefficients"),
+        530: ("ycbcr_subsampling", "YCbCrSubSampling"),
+        531: ("ycbcr_positioning", "YCbCrPositioning"),
+        532: ("reference_blackwhite", "ReferenceBlackWhite"),
+        33432: ("copyright", "Copyright"),
+        0x8769: ("ifd_pointer", "Pointer to next IFD entry"),
+    }
+    TAG_NAME = createDict(TAG_INFO, 0)
+
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+        tag = self["tag"].value
+        if tag in self.TAG_INFO:
+            # Known tag: use its symbolic field name and description.
+            self._name, self._description = self.TAG_INFO[tag]
+        else:
+            self._parser = None
+
+class IFD(FieldSet):
+    """TIFF Image File Directory: 16-bit entry count, then the entries."""
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+        # Size in bits: the 16-bit count plus the fixed-size entries.
+        self._size = 16 + self["count"].value * IFDEntry.static_size
+        self._has_offset = False
+
+    def createFields(self):
+        yield UInt16(self, "count")
+        # Sanity limit against corrupted files.
+        if MAX_COUNT < self["count"].value:
+            raise ParserError("TIFF IFD: Invalid count (%s)"
+                % self["count"].value)
+        for index in xrange(self["count"].value):
+            yield IFDEntry(self, "entry[]")
+
+class ImageFile(SeekableFieldSet):
+    def __init__(self, parent, name, description, ifd):
+        SeekableFieldSet.__init__(self, parent, name, description, None)
+        self._has_offset = False
+        self._ifd = ifd
+
+    def createFields(self):
+        datas = {}
+        for entry in self._ifd:
+            if type(entry) != IFDEntry:
+                continue
+            for c in entry:
+                if c.name != "offset":
+                    continue
+                self.seekByte(c.value, False)
+                desc = "data of ifd entry " + entry.name,
+                entryType = BasicIFDEntry.ENTRY_FORMAT[entry["type"].value]
+                count = entry["count"].value
+                if entryType == String:
+                    yield String(self, entry.name, count, desc, "\0", "ISO-8859-1")
+                else:    
+                    d = Data(self, entry.name, desc, entryType, count)
+                    datas[d.name] = d
+                    yield d
+                break
+        # image data
+        if "strip_ofs" in datas and "strip_byte" in datas:
+            for i in xrange(datas["strip_byte"]._count):
+                self.seekByte(datas["strip_ofs"]["value["+str(i)+"]"].value, False)
+                yield Bytes(self, "strip[]", datas["strip_byte"]["value["+str(i)+"]"].value)
+
+class Data(FieldSet):
+    """Fixed-size array of `count` values of a given field type."""
+
+    def __init__(self, parent, name, desc, type, count):
+        # Total size is known up front: count fields of static size.
+        size = type.static_size * count
+        FieldSet.__init__(self, parent, name, desc, size)
+        self._count = count
+        self._type = type
+
+    def createFields(self):
+        for i in xrange(self._count):
+            yield self._type(self, "value[]")
+
+class TiffFile(RootSeekableFieldSet, Parser):
+    """TIFF picture parser, handling both byte orders ("II" and "MM")."""
+    PARSER_TAGS = {
+        "id": "tiff",
+        "category": "image",
+        "file_ext": ("tif", "tiff"),
+        "mime": (u"image/tiff",),
+        "min_size": 8*8,
+# TODO: Re-enable magic
+        "magic": (("II\x2A\0", 0), ("MM\0\x2A", 0)),
+        "description": "TIFF picture"
+    }
+
+    # Correct endian is set in constructor
+    endian = LITTLE_ENDIAN
+
+    def __init__(self, stream, **args):
+        RootSeekableFieldSet.__init__(self, None, "root", stream, None, stream.askSize(self))
+        # "MM" magic selects big-endian, anything else keeps the default
+        # little-endian ("II").
+        if self.stream.readBytes(0, 2) == "MM":
+            self.endian = BIG_ENDIAN
+        Parser.__init__(self, stream, **args)
+
+    def validate(self):
+        """Check the endian marker and the fixed version number (42)."""
+        endian = self.stream.readBytes(0, 2)
+        if endian not in ("MM", "II"):
+            return "Invalid endian (%r)" % endian
+        if self["version"].value != 42:
+            return "Unknown TIFF version"
+        return True
+
+    def createFields(self):
+        yield String(self, "endian", 2, 'Endian ("II" or "MM")', charset="ASCII")
+        yield UInt16(self, "version", "TIFF version number")
+        offset = UInt32(self, "img_dir_ofs[]", "Next image directory offset (in bytes from the beginning)")
+        yield offset
+        ifds = []
+        # Follow the linked list of IFDs; a zero offset terminates it.
+        while True:
+            if offset.value == 0:
+                break
+
+            self.seekByte(offset.value, relative=False)
+            ifd = IFD(self, "ifd[]", "Image File Directory", None)
+            ifds.append(ifd)
+            yield ifd
+            offset = UInt32(self, "img_dir_ofs[]", "Next image directory offset (in bytes from the beginning)")
+            yield offset
+        # Then read the out-of-line data each IFD points to.
+        for ifd in ifds:
+            image = ImageFile(self, "image[]", "Image File", ifd)
+            yield image
diff --git a/lib/hachoir_parser/image/wmf.py b/lib/hachoir_parser/image/wmf.py
new file mode 100644
index 0000000000000000000000000000000000000000..86f9840b7201f26e6bddf954d492732bfcdf145e
--- /dev/null
+++ b/lib/hachoir_parser/image/wmf.py
@@ -0,0 +1,611 @@
+"""
+Hachoir parser of Microsoft Windows Metafile (WMF) file format.
+
+Documentation:
+ - Microsoft Windows Metafile; also known as: WMF,
+   Enhanced Metafile, EMF, APM
+   http://wvware.sourceforge.net/caolan/ora-wmf.html
+ - libwmf source code:
+     - include/libwmf/defs.h: enums
+     - src/player/meta.h: arguments parsers
+ - libemf source code
+
+Author: Victor Stinner
+Creation date: 26 december 2006
+"""
+
+MAX_FILESIZE = 50 * 1024 * 1024
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, StaticFieldSet, Enum,
+    MissingField, ParserError,
+    UInt32, Int32, UInt16, Int16, UInt8, NullBytes, RawBytes, String)
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal
+from hachoir_core.tools import createDict
+from hachoir_parser.image.common import RGBA
+
# GDI enumerations referenced by the WMF record argument parsers below.
POLYFILL_MODE = {1: "Alternate", 2: "Winding"}

BRUSH_STYLE = {
    0: u"Solid",
    1: u"Null",
    2: u"Hollow",
    3: u"Pattern",
    4: u"Indexed",
    5: u"DIB pattern",
    6: u"DIB pattern point",
    7: u"Pattern 8x8",
    8: u"DIB pattern 8x8",
}

HATCH_STYLE = {
    0: u"Horizontal",      # -----
    1: u"Vertical",        # |||||
    2: u"FDIAGONAL",       # \\\\\
    3: u"BDIAGONAL",       # /////
    4: u"Cross",           # +++++
    5: u"Diagonal cross",  # xxxxx
}

PEN_STYLE = {
    0: u"Solid",
    1: u"Dash",          # -------
    2: u"Dot",           # .......
    3: u"Dash dot",      # _._._._
    4: u"Dash dot dot",  # _.._.._
    5: u"Null",
    6: u"Inside frame",
    7: u"User style",
    8: u"Alternate",
}

# Binary raster operations
ROP2_DESC = {
     1: u"Black (0)",
     2: u"Not merge pen (DPon)",
     3: u"Mask not pen (DPna)",
     4: u"Not copy pen (PN)",
     5: u"Mask pen not (PDna)",
     6: u"Not (Dn)",
     7: u"Xor pen (DPx)",
     8: u"Not mask pen (DPan)",
     9: u"Mask pen (DPa)",
    10: u"Not xor pen (DPxn)",
    11: u"No operation (D)",
    12: u"Merge not pen (DPno)",
    13: u"Copy pen (P)",
    14: u"Merge pen not (PDno)",
    15: u"Merge pen (DPo)",
    16: u"White (1)",
}
+
def parseXY(parser):
    """Parse a signed 16-bit (x, y) coordinate pair."""
    for axis in ("x", "y"):
        yield Int16(parser, axis)
+
def parseCreateBrushIndirect(parser):
    """Arguments of a CREATEBRUSHINDIRECT record: style, color, hatch."""
    yield Enum(UInt16(parser, "brush_style"), BRUSH_STYLE)
    yield RGBA(parser, "color")
    yield Enum(UInt16(parser, "brush_hatch"), HATCH_STYLE)
+
def parsePenIndirect(parser):
    """Arguments of a CREATEPENINDIRECT record: style, size and color."""
    yield Enum(UInt16(parser, "pen_style"), PEN_STYLE)
    for name in ("pen_width", "pen_height"):
        yield UInt16(parser, name)
    yield RGBA(parser, "color")
+
def parsePolyFillMode(parser):
    """Argument of a SETPOLYFILLMODE record (alternate/winding)."""
    yield Enum(UInt16(parser, "operation"), POLYFILL_MODE)
+
def parseROP2(parser):
    """Argument of a SETROP2 record: binary raster operation code."""
    yield Enum(UInt16(parser, "operation"), ROP2_DESC)
+
def parseObjectID(parser):
    """16-bit object table index (SELECTOBJECT / DELETEOBJECT argument)."""
    yield UInt16(parser, "object_id")
+
class Point(FieldSet):
    """Signed 16-bit (x, y) point, 32 bits in total."""
    static_size = 32

    def createFields(self):
        for axis in ("x", "y"):
            yield Int16(self, axis)

    def createDescription(self):
        coords = (self["x"].value, self["y"].value)
        return "Point (%s, %s)" % coords
+
def parsePolygon(parser):
    """POLYGON record: 16-bit point count followed by the points."""
    count = UInt16(parser, "count")
    yield count
    for _ in xrange(count.value):
        yield Point(parser, "point[]")
+
# WMF records: opcode -> (name, description, argument parser or None).
# Fixed typos in the user-visible descriptions: "streching" -> "stretching"
# (0x0107) and "Fram region" -> "Frame region" (0x0429).
META = {
    0x0000: ("EOF", u"End of file", None),
    0x001E: ("SAVEDC", u"Save device context", None),
    0x0035: ("REALIZEPALETTE", u"Realize palette", None),
    0x0037: ("SETPALENTRIES", u"Set palette entries", None),
    0x00f7: ("CREATEPALETTE", u"Create palette", None),
    0x0102: ("SETBKMODE", u"Set background mode", None),
    0x0103: ("SETMAPMODE", u"Set mapping mode", None),
    0x0104: ("SETROP2", u"Set foreground mix mode", parseROP2),
    0x0106: ("SETPOLYFILLMODE", u"Set polygon fill mode", parsePolyFillMode),
    0x0107: ("SETSTRETCHBLTMODE", u"Set bitmap stretching mode", None),
    0x0108: ("SETTEXTCHAREXTRA", u"Set text character extra", None),
    0x0127: ("RESTOREDC", u"Restore device context", None),
    0x012A: ("INVERTREGION", u"Invert region", None),
    0x012B: ("PAINTREGION", u"Paint region", None),
    0x012C: ("SELECTCLIPREGION", u"Select clipping region", None),
    0x012D: ("SELECTOBJECT", u"Select object", parseObjectID),
    0x012E: ("SETTEXTALIGN", u"Set text alignment", None),
    0x0142: ("CREATEDIBPATTERNBRUSH", u"Create DIB brush with specified pattern", None),
    0x01f0: ("DELETEOBJECT", u"Delete object", parseObjectID),
    0x0201: ("SETBKCOLOR", u"Set background color", None),
    0x0209: ("SETTEXTCOLOR", u"Set text color", None),
    0x020A: ("SETTEXTJUSTIFICATION", u"Set text justification", None),
    0x020B: ("SETWINDOWORG", u"Set window origin", parseXY),
    0x020C: ("SETWINDOWEXT", u"Set window extends", parseXY),
    0x020D: ("SETVIEWPORTORG", u"Set view port origin", None),
    0x020E: ("SETVIEWPORTEXT", u"Set view port extends", None),
    0x020F: ("OFFSETWINDOWORG", u"Offset window origin", None),
    0x0211: ("OFFSETVIEWPORTORG", u"Offset view port origin", None),
    0x0213: ("LINETO", u"Draw a line to", None),
    0x0214: ("MOVETO", u"Move to", None),
    0x0220: ("OFFSETCLIPRGN", u"Offset clipping rectangle", None),
    0x0228: ("FILLREGION", u"Fill region", None),
    0x0231: ("SETMAPPERFLAGS", u"Set mapper flags", None),
    0x0234: ("SELECTPALETTE", u"Select palette", None),
    0x02FB: ("CREATEFONTINDIRECT", u"Create font indirect", None),
    0x02FA: ("CREATEPENINDIRECT", u"Create pen indirect", parsePenIndirect),
    0x02FC: ("CREATEBRUSHINDIRECT", u"Create brush indirect", parseCreateBrushIndirect),
    0x0324: ("POLYGON", u"Draw a polygon", parsePolygon),
    0x0325: ("POLYLINE", u"Draw a polyline", None),
    0x0410: ("SCALEWINDOWEXT", u"Scale window extends", None),
    0x0412: ("SCALEVIEWPORTEXT", u"Scale view port extends", None),
    0x0415: ("EXCLUDECLIPRECT", u"Exclude clipping rectangle", None),
    0x0416: ("INTERSECTCLIPRECT", u"Intersect clipping rectangle", None),
    0x0418: ("ELLIPSE", u"Draw an ellipse", None),
    0x0419: ("FLOODFILL", u"Flood fill", None),
    0x041B: ("RECTANGLE", u"Draw a rectangle", None),
    0x041F: ("SETPIXEL", u"Set pixel", None),
    0x0429: ("FRAMEREGION", u"Frame region", None),
    0x0521: ("TEXTOUT", u"Draw text", None),
    0x0538: ("POLYPOLYGON", u"Draw multiple polygons", None),
    0x0548: ("EXTFLOODFILL", u"Extend flood fill", None),
    0x061C: ("ROUNDRECT", u"Draw a rounded rectangle", None),
    0x061D: ("PATBLT", u"Pattern blitting", None),
    0x0626: ("ESCAPE", u"Escape", None),
    0x06FF: ("CREATEREGION", u"Create region", None),
    0x0817: ("ARC", u"Draw an arc", None),
    0x081A: ("PIE", u"Draw a pie", None),
    0x0830: ("CHORD", u"Draw a chord", None),
    0x0940: ("DIBBITBLT", u"DIB bit blitting", None),
    0x0a32: ("EXTTEXTOUT", u"Draw text (extra)", None),
    0x0b41: ("DIBSTRETCHBLT", u"DIB stretch blitting", None),
    0x0d33: ("SETDIBTODEV", u"Set DIB to device", None),
    0x0f43: ("STRETCHDIB", u"Stretch DIB", None),
}
META_NAME = createDict(META, 0)
META_DESC = createDict(META, 1)
+
#----------------------------------------------------------------------------
# EMF constants

# EMF mapping modes (presumably the MM_* values from wingdi.h -- TODO confirm)
EMF_MAPPING_MODE = {
    1: "TEXT",
    2: "LOMETRIC",
    3: "HIMETRIC",
    4: "LOENGLISH",
    5: "HIENGLISH",
    6: "TWIPS",
    7: "ISOTROPIC",
    8: "ANISOTROPIC",
}
+
+#----------------------------------------------------------------------------
+# EMF parser
+
def parseEmfMappingMode(parser):
    """Argument of an EMF SETMAPMODE record."""
    yield Enum(Int32(parser, "mapping_mode"), EMF_MAPPING_MODE)
+
def parseXY32(parser):
    """Parse a signed 32-bit (x, y) coordinate pair."""
    for axis in ("x", "y"):
        yield Int32(parser, axis)
+
def parseObjectID32(parser):
    """32-bit object handle, displayed in hexadecimal (EMF)."""
    yield textHandler(UInt32(parser, "object_id"), hexadecimal)
+
def parseBrushIndirect(parser):
    """Arguments of an EMF CREATEBRUSHINDIRECT record."""
    yield UInt32(parser, "ihBrush")
    yield UInt32(parser, "style")
    yield RGBA(parser, "color")
    yield Int32(parser, "hatch")
+
class Point16(FieldSet):
    """Signed 16-bit (x, y) point used by the *16 EMF records."""
    static_size = 32

    def createFields(self):
        for axis in ("x", "y"):
            yield Int16(self, axis)

    def createDescription(self):
        coords = (self["x"].value, self["y"].value)
        return "Point16: (%i,%i)" % coords
+
def parsePoint16array(parser):
    """Bounding rectangle followed by a counted array of 16-bit points."""
    yield RECT32(parser, "bounds")
    count = UInt32(parser, "count")
    yield count
    for _ in xrange(count.value):
        yield Point16(parser, "point[]")
+
def parseGDIComment(parser):
    """GDICOMMENT record: 32-bit payload size, then the raw payload."""
    data_size = UInt32(parser, "data_size")
    yield data_size
    if data_size.value:
        yield RawBytes(parser, "data", data_size.value)
+
def parseICMMode(parser):
    """Argument of a SETICMMODE record (EMF color management mode)."""
    yield UInt32(parser, "icm_mode")
+
def parseExtCreatePen(parser):
    """Arguments of an EMF EXTCREATEPEN record.

    Layout: pen handle, DIB brush pattern offsets/sizes, pen geometry and
    color, then a counted array of user style entries.
    """
    yield UInt32(parser, "ihPen")
    yield UInt32(parser, "offBmi")
    yield UInt32(parser, "cbBmi")
    yield UInt32(parser, "offBits")
    yield UInt32(parser, "cbBits")
    yield UInt32(parser, "pen_style")
    yield UInt32(parser, "width")
    yield UInt32(parser, "brush_style")
    yield RGBA(parser, "color")
    yield UInt32(parser, "hatch")
    yield UInt32(parser, "nb_style")
    # Bug fix: use the array name "style[]". With the plain name "style",
    # any record with nb_style > 1 would create duplicate field names and
    # make parsing fail.
    for index in xrange(parser["nb_style"].value):
        yield UInt32(parser, "style[]")
+
# EMF records: opcode -> (name, description, argument parser or None).
# Fixed typos in the user-visible descriptions: "Arbort path" -> "Abort
# path" (68) and "GLS bound ED record" -> "GLS bounded record" (103).
EMF_META = {
    1: ("HEADER", u"Header", None),
    2: ("POLYBEZIER", u"Draw poly bezier", None),
    3: ("POLYGON", u"Draw polygon", None),
    4: ("POLYLINE", u"Draw polyline", None),
    5: ("POLYBEZIERTO", u"Draw poly bezier to", None),
    6: ("POLYLINETO", u"Draw poly line to", None),
    7: ("POLYPOLYLINE", u"Draw poly polyline", None),
    8: ("POLYPOLYGON", u"Draw poly polygon", None),
    9: ("SETWINDOWEXTEX", u"Set window extend EX", parseXY32),
    10: ("SETWINDOWORGEX", u"Set window origin EX", parseXY32),
    11: ("SETVIEWPORTEXTEX", u"Set viewport extend EX", parseXY32),
    12: ("SETVIEWPORTORGEX", u"Set viewport origin EX", parseXY32),
    13: ("SETBRUSHORGEX", u"Set brush org EX", None),
    14: ("EOF", u"End of file", None),
    15: ("SETPIXELV", u"Set pixel V", None),
    16: ("SETMAPPERFLAGS", u"Set mapper flags", None),
    17: ("SETMAPMODE", u"Set mapping mode", parseEmfMappingMode),
    18: ("SETBKMODE", u"Set background mode", None),
    19: ("SETPOLYFILLMODE", u"Set polyfill mode", None),
    20: ("SETROP2", u"Set ROP2", None),
    21: ("SETSTRETCHBLTMODE", u"Set stretching blitting mode", None),
    22: ("SETTEXTALIGN", u"Set text align", None),
    23: ("SETCOLORADJUSTMENT", u"Set color adjustment", None),
    24: ("SETTEXTCOLOR", u"Set text color", None),
    25: ("SETBKCOLOR", u"Set background color", None),
    26: ("OFFSETCLIPRGN", u"Offset clipping region", None),
    27: ("MOVETOEX", u"Move to EX", parseXY32),
    28: ("SETMETARGN", u"Set meta region", None),
    29: ("EXCLUDECLIPRECT", u"Exclude clipping rectangle", None),
    30: ("INTERSECTCLIPRECT", u"Intersect clipping rectangle", None),
    31: ("SCALEVIEWPORTEXTEX", u"Scale viewport extend EX", None),
    32: ("SCALEWINDOWEXTEX", u"Scale window extend EX", None),
    33: ("SAVEDC", u"Save device context", None),
    34: ("RESTOREDC", u"Restore device context", None),
    35: ("SETWORLDTRANSFORM", u"Set world transform", None),
    36: ("MODIFYWORLDTRANSFORM", u"Modify world transform", None),
    37: ("SELECTOBJECT", u"Select object", parseObjectID32),
    38: ("CREATEPEN", u"Create pen", None),
    39: ("CREATEBRUSHINDIRECT", u"Create brush indirect", parseBrushIndirect),
    40: ("DELETEOBJECT", u"Delete object", parseObjectID32),
    41: ("ANGLEARC", u"Draw angle arc", None),
    42: ("ELLIPSE", u"Draw ellipse", None),
    43: ("RECTANGLE", u"Draw rectangle", None),
    44: ("ROUNDRECT", u"Draw rounded rectangle", None),
    45: ("ARC", u"Draw arc", None),
    46: ("CHORD", u"Draw chord", None),
    47: ("PIE", u"Draw pie", None),
    48: ("SELECTPALETTE", u"Select palette", None),
    49: ("CREATEPALETTE", u"Create palette", None),
    50: ("SETPALETTEENTRIES", u"Set palette entries", None),
    51: ("RESIZEPALETTE", u"Resize palette", None),
    52: ("REALIZEPALETTE", u"Realize palette", None),
    53: ("EXTFLOODFILL", u"EXT flood fill", None),
    54: ("LINETO", u"Draw line to", parseXY32),
    55: ("ARCTO", u"Draw arc to", None),
    56: ("POLYDRAW", u"Draw poly draw", None),
    57: ("SETARCDIRECTION", u"Set arc direction", None),
    58: ("SETMITERLIMIT", u"Set miter limit", None),
    59: ("BEGINPATH", u"Begin path", None),
    60: ("ENDPATH", u"End path", None),
    61: ("CLOSEFIGURE", u"Close figure", None),
    62: ("FILLPATH", u"Fill path", None),
    63: ("STROKEANDFILLPATH", u"Stroke and fill path", None),
    64: ("STROKEPATH", u"Stroke path", None),
    65: ("FLATTENPATH", u"Flatten path", None),
    66: ("WIDENPATH", u"Widen path", None),
    67: ("SELECTCLIPPATH", u"Select clipping path", None),
    68: ("ABORTPATH", u"Abort path", None),
    70: ("GDICOMMENT", u"GDI comment", parseGDIComment),
    71: ("FILLRGN", u"Fill region", None),
    72: ("FRAMERGN", u"Frame region", None),
    73: ("INVERTRGN", u"Invert region", None),
    74: ("PAINTRGN", u"Paint region", None),
    75: ("EXTSELECTCLIPRGN", u"EXT select clipping region", None),
    76: ("BITBLT", u"Bit blitting", None),
    77: ("STRETCHBLT", u"Stretch blitting", None),
    78: ("MASKBLT", u"Mask blitting", None),
    79: ("PLGBLT", u"PLG blitting", None),
    80: ("SETDIBITSTODEVICE", u"Set DIB bits to device", None),
    81: ("STRETCHDIBITS", u"Stretch DIB bits", None),
    82: ("EXTCREATEFONTINDIRECTW", u"EXT create font indirect W", None),
    83: ("EXTTEXTOUTA", u"EXT text out A", None),
    84: ("EXTTEXTOUTW", u"EXT text out W", None),
    85: ("POLYBEZIER16", u"Draw poly bezier (16-bit)", None),
    86: ("POLYGON16", u"Draw polygon (16-bit)", parsePoint16array),
    87: ("POLYLINE16", u"Draw polyline (16-bit)", parsePoint16array),
    88: ("POLYBEZIERTO16", u"Draw poly bezier to (16-bit)", parsePoint16array),
    89: ("POLYLINETO16", u"Draw polyline to (16-bit)", parsePoint16array),
    90: ("POLYPOLYLINE16", u"Draw poly polyline (16-bit)", None),
    91: ("POLYPOLYGON16", u"Draw poly polygon (16-bit)", parsePoint16array),
    92: ("POLYDRAW16", u"Draw poly draw (16-bit)", None),
    93: ("CREATEMONOBRUSH", u"Create monobrush", None),
    94: ("CREATEDIBPATTERNBRUSHPT", u"Create DIB pattern brush PT", None),
    95: ("EXTCREATEPEN", u"EXT create pen", parseExtCreatePen),
    96: ("POLYTEXTOUTA", u"Poly text out A", None),
    97: ("POLYTEXTOUTW", u"Poly text out W", None),
    98: ("SETICMMODE", u"Set ICM mode", parseICMMode),
    99: ("CREATECOLORSPACE", u"Create color space", None),
    100: ("SETCOLORSPACE", u"Set color space", None),
    101: ("DELETECOLORSPACE", u"Delete color space", None),
    102: ("GLSRECORD", u"GLS record", None),
    103: ("GLSBOUNDEDRECORD", u"GLS bounded record", None),
    104: ("PIXELFORMAT", u"Pixel format", None),
}
EMF_META_NAME = createDict(EMF_META, 0)
EMF_META_DESC = createDict(EMF_META, 1)
+
class Function(FieldSet):
    """One metafile record: size, function code and its arguments.

    Handles both formats; the root parser's isEMF() selects the layout
    (EMF sizes are in bytes, WMF sizes in 16-bit words).
    """
    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        if self.root.isEMF():
            # EMF: "size" counts bytes
            self._size = self["size"].value * 8
        else:
            # WMF: "size" counts 16-bit words
            self._size = self["size"].value * 16

    def createFields(self):
        if self.root.isEMF():
            yield Enum(UInt32(self, "function"), EMF_META_NAME)
            yield UInt32(self, "size")
            try:
                parser = EMF_META[self["function"].value][2]
            except KeyError:
                parser = None
        else:
            yield UInt32(self, "size")
            yield Enum(UInt16(self, "function"), META_NAME)
            try:
                parser = META[self["function"].value][2]
            except KeyError:
                parser = None
        if parser:
            for field in parser(self):
                yield field
        else:
            # No dedicated argument parser: keep the payload as raw bytes.
            size = (self.size - self.current_size) // 8
            if size:
                yield RawBytes(self, "data", size)

    def isValid(self):
        """Is the function code a known WMF/EMF opcode?"""
        func = self["function"]
        return func.value in func.getEnum()

    def createDescription(self):
        if self.root.isEMF():
            # Bug fix: an unknown EMF opcode used to raise KeyError here;
            # fall back to a generic description like the WMF branch does.
            try:
                return EMF_META_DESC[self["function"].value]
            except KeyError:
                return "Function %s" % self["function"].display
        try:
            return META_DESC[self["function"].value]
        except KeyError:
            return "Function %s" % self["function"].display
+
class RECT16(StaticFieldSet):
    """Rectangle stored as four signed 16-bit values (left, top, right, bottom)."""
    format = (
        (Int16, "left"),
        (Int16, "top"),
        (Int16, "right"),
        (Int16, "bottom"),
    )

    def createDescription(self):
        width = self["right"].value - self["left"].value
        height = self["bottom"].value - self["top"].value
        return "%s: %ux%u at (%u,%u)" % (
            self.__class__.__name__,
            width,
            height,
            self["left"].value,
            self["top"].value)
+
class RECT32(RECT16):
    # Same layout and description as RECT16, with 32-bit coordinates.
    format = (
        (Int32, "left"),
        (Int32, "top"),
        (Int32, "right"),
        (Int32, "bottom"),
    )
+
class PlaceableHeader(FieldSet):
    """
    Header of Placeable Metafile (file extension .APM),
    created by Aldus Corporation
    """
    # Magic covers the signature plus a null handle (magic, handle=0x0000)
    MAGIC = "\xD7\xCD\xC6\x9A\0\0"   # (magic, handle=0x0000)

    def createFields(self):
        yield textHandler(UInt32(self, "signature", "Placeable Metafiles signature (0x9AC6CDD7)"), hexadecimal)
        yield UInt16(self, "handle")
        yield RECT16(self, "rect")
        yield UInt16(self, "inch")
        yield NullBytes(self, "reserved", 4)
        yield textHandler(UInt16(self, "checksum"), hexadecimal)
+
class EMF_Header(FieldSet):
    """EMF file header record (type 1), including the optional UTF-16
    description string and trailing padding."""
    MAGIC = "\x20\x45\x4D\x46\0\0"   # (magic, min_ver=0x0000)
    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        # "size" is the header size in bytes
        self._size = self["size"].value * 8

    def createFields(self):
        LONG = Int32
        yield UInt32(self, "type", "Record type (always 1)")
        yield UInt32(self, "size", "Size of the header in bytes")
        yield RECT32(self, "Bounds", "Inclusive bounds")
        yield RECT32(self, "Frame", "Inclusive picture frame")
        yield textHandler(UInt32(self, "signature", "Signature ID (always 0x464D4520)"), hexadecimal)
        yield UInt16(self, "min_ver", "Minor version")
        yield UInt16(self, "maj_ver", "Major version")
        yield UInt32(self, "file_size", "Size of the file in bytes")
        yield UInt32(self, "NumOfRecords", "Number of records in the metafile")
        yield UInt16(self, "NumOfHandles", "Number of handles in the handle table")
        yield NullBytes(self, "reserved", 2)
        yield UInt32(self, "desc_size", "Size of description in 16-bit words")
        yield UInt32(self, "desc_ofst", "Offset of description string in metafile")
        yield UInt32(self, "nb_colors", "Number of color palette entries")
        yield LONG(self, "width_px", "Width of reference device in pixels")
        yield LONG(self, "height_px", "Height of reference device in pixels")
        yield LONG(self, "width_mm", "Width of reference device in millimeters")
        yield LONG(self, "height_mm", "Height of reference device in millimeters")

        # Read description (if any); only handled when it immediately
        # follows the fixed header fields.
        offset = self["desc_ofst"].value
        current = (self.absolute_address + self.current_size) // 8
        size = self["desc_size"].value * 2
        if offset == current and size:
            yield String(self, "description", size, charset="UTF-16-LE", strip="\0 ")

        # Read padding (if any) up to the declared header size
        size = self["size"].value - self.current_size//8
        if size:
            yield RawBytes(self, "padding", size)
+
class WMF_File(Parser):
    """Parser for the WMF family: plain WMF, Aldus Placeable (APM) and
    Enhanced Metafile (EMF). The variant is detected from the magic
    bytes (see isEMF()/isAPM())."""
    PARSER_TAGS = {
        "id": "wmf",
        "category": "image",
        "file_ext": ("wmf", "apm", "emf"),
        "mime": (
            u"image/wmf", u"image/x-wmf", u"image/x-win-metafile",
            u"application/x-msmetafile", u"application/wmf", u"application/x-wmf",
            u"image/x-emf"),
        "magic": (
            (PlaceableHeader.MAGIC, 0),
            (EMF_Header.MAGIC, 40*8),
            # WMF: file_type=memory, header size=9, version=3.0
            ("\0\0\x09\0\0\3", 0),
            # WMF: file_type=disk, header size=9, version=3.0
            ("\1\0\x09\0\0\3", 0),
        ),
        "min_size": 40*8,
        "description": u"Microsoft Windows Metafile (WMF)",
    }
    endian = LITTLE_ENDIAN
    FILE_TYPE = {0: "memory", 1: "disk"}

    def validate(self):
        """Validate header fields, then sanity-check the first 5 records."""
        if self.isEMF():
            # Check EMF header
            emf = self["emf_header"]
            if emf["signature"].value != 0x464D4520:
                return "Invalid signature"
            if emf["type"].value != 1:
                return "Invalid record type"
            if emf["reserved"].value != "\0\0":
                return "Invalid reserved"
        else:
            # Check APM (placeable) header
            if self.isAPM():
                amf = self["amf_header"]
                if amf["handle"].value != 0:
                    return "Invalid handle"
                if amf["reserved"].value != "\0\0\0\0":
                    return "Invalid reserved"

            # Check common header
            if self["file_type"].value not in (0, 1):
                return "Invalid file type"
            if self["header_size"].value != 9:
                return "Invalid header size"
            if self["nb_params"].value != 0:
                return "Invalid number of parameters"

        # Check first functions
        for index in xrange(5):
            try:
                func = self["func[%u]" % index]
            except MissingField:
                # Fewer than 5 records is fine if parsing finished cleanly
                if self.done:
                    return True
                return "Unable to get function #%u" % index
            except ParserError:
                return "Unable to create function #%u" % index

            # Check first frame values
            if not func.isValid():
                return "Function #%u is invalid" % index
        return True

    def createFields(self):
        if self.isEMF():
            yield EMF_Header(self, "emf_header")
        else:
            if self.isAPM():
                yield PlaceableHeader(self, "amf_header")
            yield Enum(UInt16(self, "file_type"), self.FILE_TYPE)
            yield UInt16(self, "header_size", "Size of header in 16-bit words (always 9)")
            yield UInt8(self, "win_ver_min", "Minor version of Microsoft Windows")
            yield UInt8(self, "win_ver_maj", "Major version of Microsoft Windows")
            yield UInt32(self, "file_size", "Total size of the metafile in 16-bit words")
            yield UInt16(self, "nb_obj", "Number of objects in the file")
            yield UInt32(self, "max_record_size", "The size of largest record in 16-bit words")
            yield UInt16(self, "nb_params", "Not Used (always 0)")

        # Records until end of stream
        while not(self.eof):
            yield Function(self, "func[]")

    def isEMF(self):
        """File is in EMF format?"""
        # Once parsing started, trust the first field's name
        if 1 <= self.current_length:
            return self[0].name == "emf_header"
        if self.size < 44*8:
            return False
        # EMF magic sits at byte offset 40, after the header's fixed part
        magic = EMF_Header.MAGIC
        return self.stream.readBytes(40*8, len(magic)) == magic

    def isAPM(self):
        """File is in Aldus Placeable Metafiles format?"""
        if 1 <= self.current_length:
            return self[0].name == "amf_header"
        else:
            magic = PlaceableHeader.MAGIC
            return (self.stream.readBytes(0, len(magic)) == magic)

    def createDescription(self):
        if self.isEMF():
            return u"Microsoft Enhanced Metafile (EMF) picture"
        elif self.isAPM():
            return u"Aldus Placeable Metafile (APM) picture"
        else:
            return u"Microsoft Windows Metafile (WMF) picture"

    def createMimeType(self):
        if self.isEMF():
            return u"image/x-emf"
        else:
            return u"image/wmf"

    def createContentSize(self):
        if self.isEMF():
            return None
        start = self["func[0]"].absolute_address
        # Search the EOF record: size=3 (32-bit), function=0 (16-bit)
        end = self.stream.searchBytes("\3\0\0\0\0\0", start, MAX_FILESIZE * 8)
        if end is not None:
            return end + 6*8
        return None
+
diff --git a/lib/hachoir_parser/image/xcf.py b/lib/hachoir_parser/image/xcf.py
new file mode 100644
index 0000000000000000000000000000000000000000..f0bfa30ca4382e87ab0e26b7d7e1a83eb2d70729
--- /dev/null
+++ b/lib/hachoir_parser/image/xcf.py
@@ -0,0 +1,331 @@
+"""
+Gimp image parser (XCF file, ".xcf" extension).
+
You can find information about the XCF file format in the Gimp source code.
URL to read the CVS online:
+  http://cvs.gnome.org/viewcvs/gimp/app/xcf/
+  \--> files xcf-read.c and xcf-load.c
+
+Author: Victor Stinner
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (StaticFieldSet, FieldSet, ParserError,
+    UInt8, UInt32, Enum, Float32, String, PascalString32, RawBytes)
+from hachoir_parser.image.common import RGBA
+from hachoir_core.endian import NETWORK_ENDIAN
+
class XcfCompression(FieldSet):
    """Tile data compression method (one byte)."""
    static_size = 8
    COMPRESSION_NAME = {
        0: u"None",
        1: u"RLE",
        2: u"Zlib",
        3: u"Fractal"
    }

    def createFields(self):
        yield Enum(UInt8(self, "compression",  "Compression method"), self.COMPRESSION_NAME)
+
class XcfResolution(StaticFieldSet):
    """Image resolution property payload (two 32-bit floats)."""
    format = (
        (Float32, "xres", "X resolution in DPI"),
        (Float32, "yres", "Y resolution in DPI")
    )
+
class XcfTattoo(StaticFieldSet):
    # "Tattoo": Gimp's persistent unique ID for an image element
    format = ((UInt32, "tattoo", "Tattoo"),)
+
class LayerOffsets(StaticFieldSet):
    """Layer offset property payload: (x, y) position in the image."""
    format = (
        (UInt32, "ofst_x", "Offset X"),
        (UInt32, "ofst_y", "Offset Y")
    )
+
class LayerMode(FieldSet):
    """Layer combination (blend) mode property payload."""
    static_size = 32
    MODE_NAME = {
         0: u"Normal",
         1: u"Dissolve",
         2: u"Behind",
         3: u"Multiply",
         4: u"Screen",
         5: u"Overlay",
         6: u"Difference",
         7: u"Addition",
         8: u"Subtract",
         9: u"Darken only",
        10: u"Lighten only",
        11: u"Hue",
        12: u"Saturation",
        13: u"Color",
        14: u"Value",
        15: u"Divide",
        16: u"Dodge",
        17: u"Burn",
        18: u"Hard light",
        19: u"Soft light",
        20: u"Grain extract",
        21: u"Grain merge",
        22: u"Color erase"
    }

    def createFields(self):
        yield Enum(UInt32(self, "mode", "Layer mode"), self.MODE_NAME)
+
class GimpBoolean(UInt32):
    """32-bit integer interpreted as a boolean (value 1 means True).

    The redundant __init__ override (which only forwarded the same
    arguments to UInt32.__init__) was removed; the inherited constructor
    behaves identically.
    """
    def createValue(self):
        return 1 == UInt32.createValue(self)
+
class XcfUnit(StaticFieldSet):
    # Measurement unit property payload
    format = ((UInt32, "unit", "Unit"),)
+
class XcfParasiteEntry(FieldSet):
    """One "parasite" (named, flagged blob of metadata attached by Gimp)."""
    def createFields(self):
        yield PascalString32(self, "name", "Name", strip="\0", charset="UTF-8")
        yield UInt32(self, "flags", "Flags")
        yield PascalString32(self, "data", "Data", strip=" \0", charset="UTF-8")
+
class XcfLevel(FieldSet):
    """One level of an image hierarchy: geometry, a zero-terminated list
    of tile data offsets, then the tile data itself."""
    def createFields(self):
        yield UInt32(self, "width", "Width in pixel")
        yield UInt32(self, "height", "Height in pixel")
        yield UInt32(self, "offset", "Offset")
        offset = self["offset"].value
        if offset == 0:
            return
        data_offsets = []
        # Read tile offsets until the first tile starts (or a 0 sentinel)
        while (self.absolute_address + self.current_size)/8 < offset:
            chunk = UInt32(self, "data_offset[]", "Data offset")
            yield chunk
            if chunk.value == 0:
                break
            data_offsets.append(chunk)
        # The offset table must end exactly where the first tile begins
        if (self.absolute_address + self.current_size)/8 != offset:
            raise ParserError("Problem with level offset.")
        previous = offset
        # Each tile's size is the distance to the next tile's offset
        for chunk in data_offsets:
            data_offset = chunk.value
            size = data_offset - previous
            yield RawBytes(self, "data[]", size, "Data content of %s" % chunk.name)
            previous = data_offset
+
class XcfHierarchy(FieldSet):
    """Pixel data hierarchy: geometry plus a zero-terminated list of
    level offsets, followed by the levels themselves."""
    def createFields(self):
        yield UInt32(self, "width", "Width")
        yield UInt32(self, "height", "Height")
        yield UInt32(self, "bpp", "Bits/pixel")

        offsets = []
        while True:
            chunk = UInt32(self, "offset[]", "Level offset")
            yield chunk
            if chunk.value == 0:
                break
            offsets.append(chunk.value)
        # Seek to each level; seekByte may return a padding field to emit
        for offset in offsets:
            padding = self.seekByte(offset, relative=False)
            if padding is not None:
                yield padding
            yield XcfLevel(self, "level[]", "Level")
#        yield XcfChannel(self, "channel[]", "Channel"))
+
class XcfChannel(FieldSet):
    """One saved channel: geometry, name, properties and pixel hierarchy."""
    def createFields(self):
        yield UInt32(self, "width", "Channel width")
        yield UInt32(self, "height", "Channel height")
        yield PascalString32(self, "name", "Channel name", strip="\0", charset="UTF-8")
        for field in readProperties(self):
            yield field
        yield UInt32(self, "hierarchy_ofs", "Hierarchy offset")
        yield XcfHierarchy(self, "hierarchy", "Hierarchy")

    def createDescription(self):
         return 'Channel "%s"' % self["name"].value
+
class XcfLayer(FieldSet):
    """One image layer: geometry, type, name, properties and the offsets
    of its pixel hierarchy and optional layer mask."""
    def createFields(self):
        yield UInt32(self, "width", "Layer width in pixels")
        yield UInt32(self, "height", "Layer height in pixels")
        yield Enum(UInt32(self, "type", "Layer type"), XcfFile.IMAGE_TYPE_NAME)
        yield PascalString32(self, "name", "Layer name", strip="\0", charset="UTF-8")
        for prop in readProperties(self):
            yield prop

        # --
        # TODO: Hack for Gimp 1.2 files
        # --

        yield UInt32(self, "hierarchy_ofs", "Hierarchy offset")
        yield UInt32(self, "mask_ofs", "Layer mask offset")
        # Jump to the hierarchy; seekByte may return a padding field
        padding = self.seekByte(self["hierarchy_ofs"].value, relative=False)
        if padding is not None:
            yield padding
        yield XcfHierarchy(self, "hierarchy", "Hierarchy")
        # TODO: Read layer mask if needed: self["mask_ofs"].value != 0

    def createDescription(self):
        return 'Layer "%s"' % self["name"].value
+
class XcfParasites(FieldSet):
    """Parasites property payload: parasite entries up to the declared size."""
    def createFields(self):
        limit = self["../size"].value * 8
        while self.current_size < limit:
            yield XcfParasiteEntry(self, "parasite[]", "Parasite")
+
class XcfProperty(FieldSet):
    """One (type, size, payload) property record.

    A type-specific handler parses the payload when one is registered;
    otherwise the payload is kept as raw bytes.
    """
    PROP_COMPRESSION = 17
    PROP_RESOLUTION = 19
    PROP_PARASITES = 21
    TYPE_NAME = {
         0: u"End",
         1: u"Colormap",
         2: u"Active layer",
         3: u"Active channel",
         4: u"Selection",
         5: u"Floating selection",
         6: u"Opacity",
         7: u"Mode",
         8: u"Visible",
         9: u"Linked",
        10: u"Lock alpha",
        11: u"Apply mask",
        12: u"Edit mask",
        13: u"Show mask",
        14: u"Show masked",
        15: u"Offsets",
        16: u"Color",
        17: u"Compression",
        18: u"Guides",
        19: u"Resolution",
        20: u"Tattoo",
        21: u"Parasites",
        22: u"Unit",
        23: u"Paths",
        24: u"User unit",
        25: u"Vectors",
        26: u"Text layer flags",
    }

    # Payload parsers by property type
    handler = {
         6: RGBA,
         7: LayerMode,
         8: GimpBoolean,
         9: GimpBoolean,
        10: GimpBoolean,
        11: GimpBoolean,
        12: GimpBoolean,
        13: GimpBoolean,
        15: LayerOffsets,
        17: XcfCompression,
        19: XcfResolution,
        20: XcfTattoo,
        21: XcfParasites,
        22: XcfUnit
    }

    def __init__(self, *args, **kw):
        FieldSet.__init__(self, *args, **kw)
        # 8 bytes of type+size header, plus the payload
        self._size = (8 + self["size"].value) * 8

    def createFields(self):
        yield Enum(UInt32(self, "type",  "Property type"), self.TYPE_NAME)
        yield UInt32(self, "size", "Property size")

        size = self["size"].value
        if 0 < size:
            cls = self.handler.get(self["type"].value, None)
            if cls:
                yield cls(self, "data", size=size*8)
            else:
                yield RawBytes(self, "data", size, "Data")

    def createDescription(self):
        return "Property: %s" % self["type"].display
+
def readProperties(parser):
    """Yield XcfProperty fields until the "End" property (type 0)."""
    prop = None
    while prop is None or prop["type"].value != 0:
        prop = XcfProperty(parser, "property[]")
        yield prop
+
class XcfFile(Parser):
    """Gimp XCF picture parser: signature, global properties, then
    offset-addressed layers and channels."""
    PARSER_TAGS = {
        "id": "xcf",
        "category": "image",
        "file_ext": ("xcf",),
        "mime": (u"image/x-xcf", u"application/x-gimp-image"),
        "min_size": (26 + 8 + 4 + 4)*8, # header+empty property+layer offset+channel offset
        "magic": (
            ('gimp xcf file\0', 0),
            ('gimp xcf v002\0', 0),
        ),
        "description": "Gimp (XCF) picture"
    }
    endian = NETWORK_ENDIAN
    IMAGE_TYPE_NAME = {
        0: u"RGB",
        1: u"Gray",
        2: u"Indexed"
    }

    def validate(self):
        """Accept only the two known 14-byte signatures."""
        if self.stream.readBytes(0, 14) not in ('gimp xcf file\0', 'gimp xcf v002\0'):
            return "Wrong signature"
        return True

    def createFields(self):
        # Read signature
        yield String(self, "signature", 14,  "Gimp picture signature (ends with nul byte)", charset="ASCII")

        # Read image general informations (width, height, type)
        yield UInt32(self, "width", "Image width")
        yield UInt32(self, "height", "Image height")
        yield Enum(UInt32(self, "type", "Image type"), self.IMAGE_TYPE_NAME)
        for prop in readProperties(self):
            yield prop

        # Read layer offsets (zero-terminated list)
        layer_offsets = []
        while True:
            chunk = UInt32(self, "layer_offset[]", "Layer offset")
            yield chunk
            if chunk.value == 0:
                break
            layer_offsets.append(chunk.value)

        # Read channel offsets (zero-terminated list)
        channel_offsets = []
        while True:
            chunk = UInt32(self, "channel_offset[]", "Channel offset")
            yield chunk
            if chunk.value == 0:
                break
            channel_offsets.append(chunk.value)

        # Read layers; each layer's size is bounded by the next offset
        for index, offset in enumerate(layer_offsets):
            if index+1 < len(layer_offsets):
                size = (layer_offsets[index+1] - offset) * 8
            else:
                size = None
            padding = self.seekByte(offset, relative=False)
            # Consistency/bug fix: test "is not None" like the channel
            # loop below, instead of relying on the field's truth value.
            if padding is not None:
                yield padding
            yield XcfLayer(self, "layer[]", size=size)

        # Read channels, same scheme as the layers
        for index, offset in enumerate(channel_offsets):
            if index+1 < len(channel_offsets):
                size = (channel_offsets[index+1] - offset) * 8
            else:
                size = None
            padding = self.seekByte(offset, relative=False)
            if padding is not None:
                yield padding
            yield XcfChannel(self, "channel[]", "Channel", size=size)
+
diff --git a/lib/hachoir_parser/misc/__init__.py b/lib/hachoir_parser/misc/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..10e98bb2f4020e721d9a74891ad6989244cefadd
--- /dev/null
+++ b/lib/hachoir_parser/misc/__init__.py
@@ -0,0 +1,14 @@
+from hachoir_parser.misc.file_3do import File3do
+from hachoir_parser.misc.file_3ds import File3ds
+from hachoir_parser.misc.torrent import TorrentFile
+from hachoir_parser.misc.ttf import TrueTypeFontFile
+from hachoir_parser.misc.chm import ChmFile
+from hachoir_parser.misc.lnk import LnkFile
+from hachoir_parser.misc.pcf import PcfFile
+from hachoir_parser.misc.ole2 import OLE2_File
+from hachoir_parser.misc.pdf import PDFDocument
+from hachoir_parser.misc.pifv import PIFVFile
+from hachoir_parser.misc.hlp import HlpFile
+from hachoir_parser.misc.gnome_keyring import GnomeKeyring
+from hachoir_parser.misc.bplist import BPList
+
diff --git a/lib/hachoir_parser/misc/bplist.py b/lib/hachoir_parser/misc/bplist.py
new file mode 100644
index 0000000000000000000000000000000000000000..c46345e37ebd1513beb532155f61c904c4cccaa0
--- /dev/null
+++ b/lib/hachoir_parser/misc/bplist.py
@@ -0,0 +1,292 @@
+"""
+Apple/NeXT Binary Property List (BPLIST) parser.
+
+Also includes a .createXML() function which produces an XML representation of the object.
+Note that it will discard unknown objects, nulls and fill values, but should work for most files.
+
+Documents:
+- CFBinaryPList.c
+  http://src.gnu-darwin.org/DarwinSourceArchive/expanded/CF/CF-299/Parsing.subproj/CFBinaryPList.c
+- ForFoundationOnly.h (for structure formats)
+  http://src.gnu-darwin.org/DarwinSourceArchive/expanded/CF/CF-299/Base.subproj/ForFoundationOnly.h
+- XML <-> BPList converter
+  http://scw.us/iPhone/plutil/plutil.pl
+Author: Robert Xiao
+Created: 2008-09-21
+"""
+
+from hachoir_parser import HachoirParser
+from hachoir_core.field import (RootSeekableFieldSet, FieldSet, Enum,
+Bits, GenericInteger, Float32, Float64, UInt8, UInt64, Bytes, NullBytes, RawBytes, String)
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_core.text_handler import displayHandler
+from hachoir_core.tools import humanDatetime
+from datetime import datetime, timedelta
+
+class BPListTrailer(FieldSet):
+    """
+    Fixed trailer at the end of a binary plist.
+
+    Holds the byte widths used for offsets and object references, the
+    object count, the top-level object number and the offset-table
+    position -- everything needed to parse the rest of the file.
+    """
+    def createFields(self):
+        yield NullBytes(self, "unused", 6)
+        yield UInt8(self, "offsetIntSize", "Size (in bytes) of offsets in the offset table")
+        yield UInt8(self, "objectRefSize", "Size (in bytes) of object numbers in object references")
+        yield UInt64(self, "numObjects", "Number of objects in this file")
+        yield UInt64(self, "topObject", "Top-level object reference")
+        yield UInt64(self, "offsetTableOffset", "File offset to the offset table")
+
+    def createDescription(self):
+        return "Binary PList trailer"
+
+class BPListOffsetTable(FieldSet):
+    """Table of per-object file offsets; entry width comes from the trailer."""
+    def createFields(self):
+        # offsetIntSize is in bytes; Bits() wants a bit count.
+        size = self["../trailer/offsetIntSize"].value*8
+        for i in range(self["../trailer/numObjects"].value):
+            yield Bits(self, "offset[]", size)
+
+class BPListSize(FieldSet):
+    """
+    4-bit size nibble used by variable-length objects.
+
+    The escape value 0xF means the real size follows as a full
+    BPListObject (an Int); createValue() hides that distinction.
+    """
+    def createFields(self):
+        yield Bits(self, "size", 4)
+        if self['size'].value == 0xF:
+            # Escape: actual size encoded as a following Int object.
+            yield BPListObject(self, "fullsize")
+
+    def createValue(self):
+        if 'fullsize' in self:
+            return self['fullsize'].value
+        else:
+            return self['size'].value
+
+class BPListObjectRef(GenericInteger):
+    """
+    Reference to another object, stored as an unsigned integer whose
+    byte width is given by the trailer's objectRefSize field.
+    """
+    def __init__(self, parent, name, description=None):
+        size = parent['/trailer/objectRefSize'].value*8
+        GenericInteger.__init__(self, parent, name, False, size, description)
+
+    def getRef(self):
+        # Resolve the reference to the object field parsed at that index.
+        return self.parent['/object[' + str(self.value) + ']']
+
+    def createDisplay(self):
+        # Display the referenced object, not the raw index.
+        return self.getRef().display
+
+    def createXML(self, prefix=''):
+        return self.getRef().createXML(prefix)
+
+class BPListArray(FieldSet):
+    """Array payload: a run of object references (count fixed by caller)."""
+    def __init__(self, parent, name, size, description=None):
+        FieldSet.__init__(self, parent, name, description=description)
+        # Number of elements, decoded by the caller from the size nibble.
+        self.numels = size
+
+    def createFields(self):
+        for i in range(self.numels):
+            yield BPListObjectRef(self, "ref[]")
+
+    def createValue(self):
+        return self.array('ref')
+
+    def createDisplay(self):
+        return '[' + ', '.join([x.display for x in self.value]) + ']'
+
+    def createXML(self,prefix=''):
+        return prefix + '<array>\n' + ''.join([x.createXML(prefix + '\t' ) + '\n' for x in self.value]) + prefix + '</array>'
+
+class BPListDict(FieldSet):
+    """
+    Dict payload: all key references first, then all value references
+    (the bplist on-disk layout), paired back up in createValue().
+    """
+    def __init__(self, parent, name, size, description=None):
+        FieldSet.__init__(self, parent, name, description=description)
+        # Number of key/value pairs, decoded by the caller.
+        self.numels = size
+
+    def createFields(self):
+        for i in range(self.numels):
+            yield BPListObjectRef(self, "keyref[]")
+        for i in range(self.numels):
+            yield BPListObjectRef(self, "valref[]")
+
+    def createValue(self):
+        # Pair each key reference with its value reference.
+        return zip(self.array('keyref'),self.array('valref'))
+
+    def createDisplay(self):
+        return '{' + ', '.join(['%s: %s'%(k.display,v.display) for k,v in self.value]) + '}'
+
+    def createXML(self, prefix=''):
+        return prefix + '<dict>\n' + ''.join(['%s\t<key>%s</key>\n%s\n'%(prefix,k.getRef().value.encode('utf-8'),v.createXML(prefix + '\t')) for k,v in self.value]) + prefix + '</dict>'
+
+class BPListObject(FieldSet):
+    """
+    One bplist object: a 4-bit marker type followed by a type-specific
+    payload. Each branch of createFields() also installs self.xml, the
+    callable later used by createXML(); objects with no XML equivalent
+    (null, fill, UID, unknown markers) emit an empty string.
+    """
+    def createFields(self):
+        yield Enum(Bits(self, "marker_type", 4),
+                    {0: "Simple",
+                     1: "Int",
+                     2: "Real",
+                     3: "Date",
+                     4: "Data",
+                     5: "ASCII String",
+                     6: "UTF-16-BE String",
+                     8: "UID",
+                     10: "Array",
+                     13: "Dict",})
+        markertype = self['marker_type'].value
+        if markertype == 0:
+            # Simple (Null)
+            yield Enum(Bits(self, "value", 4),
+                        {0: "Null",
+                         8: "False",
+                         9: "True",
+                         15: "Fill Byte",})
+            if self['value'].display == "False":
+                self.xml=lambda prefix:prefix + "<false/>"
+            elif self['value'].display == "True":
+                self.xml=lambda prefix:prefix + "<true/>"
+            else:
+                # Null and fill bytes are dropped from the XML output.
+                self.xml=lambda prefix:prefix + ""
+
+        elif markertype == 1:
+            # Int
+            yield Bits(self, "size", 4, "log2 of number of bytes")
+            size=self['size'].value
+            # 8-bit (size=0), 16-bit (size=1) and 32-bit (size=2) numbers are unsigned
+            # 64-bit (size=3) numbers are signed
+            yield GenericInteger(self, "value", (size>=3), (2**size)*8)
+            self.xml=lambda prefix:prefix + "<integer>%s</integer>"%self['value'].value
+
+        elif markertype == 2:
+            # Real
+            yield Bits(self, "size", 4, "log2 of number of bytes")
+            if self['size'].value == 2: # 2**2 = 4 byte float
+                yield Float32(self, "value")
+            elif self['size'].value == 3: # 2**3 = 8 byte float
+                yield Float64(self, "value")
+            else:
+                # FIXME: What is the format of the real?
+                yield Bits(self, "value", (2**self['size'].value)*8)
+            self.xml=lambda prefix:prefix + "<real>%s</real>"%self['value'].value
+
+        elif markertype == 3:
+            # Date
+            yield Bits(self, "extra", 4, "Extra value, should be 3")
+            # CoreFoundation dates count seconds from 2001-01-01.
+            cvt_time=lambda v:datetime(2001,1,1) + timedelta(seconds=v)
+            yield displayHandler(Float64(self, "value"),lambda x:humanDatetime(cvt_time(x)))
+            self.xml=lambda prefix:prefix + "<date>%s</date>"%(cvt_time(self['value'].value).isoformat())
+
+        elif markertype == 4:
+            # Data
+            yield BPListSize(self, "size")
+            if self['size'].value:
+                yield Bytes(self, "value", self['size'].value)
+                self.xml=lambda prefix:prefix + "<data>\n%s\n%s</data>"%(self['value'].value.encode('base64').strip(),prefix)
+            else:
+                self.xml=lambda prefix:prefix + '<data></data>'
+
+        elif markertype == 5:
+            # ASCII String
+            yield BPListSize(self, "size")
+            if self['size'].value:
+                yield String(self, "value", self['size'].value, charset="ASCII")
+                self.xml=lambda prefix:prefix + "<string>%s</string>"%(self['value'].value.encode('iso-8859-1'))
+            else:
+                self.xml=lambda prefix:prefix + '<string></string>'
+
+        elif markertype == 6:
+            # UTF-16-BE String
+            yield BPListSize(self, "size")
+            if self['size'].value:
+                # Size nibble counts UTF-16 code units, i.e. 2 bytes each.
+                yield String(self, "value", self['size'].value*2, charset="UTF-16-BE")
+                self.xml=lambda prefix:prefix + "<string>%s</string>"%(self['value'].value.encode('utf-8'))
+            else:
+                self.xml=lambda prefix:prefix + '<string></string>'
+
+        elif markertype == 8:
+            # UID
+            yield Bits(self, "size", 4, "Number of bytes minus 1")
+            yield GenericInteger(self, "value", False, (self['size'].value + 1)*8)
+            self.xml=lambda prefix:prefix + "" # no equivalent?
+
+        elif markertype == 10:
+            # Array
+            yield BPListSize(self, "size")
+            size = self['size'].value
+            if size:
+                yield BPListArray(self, "value", size)
+                self.xml=lambda prefix:self['value'].createXML(prefix)
+
+        elif markertype == 13:
+            # Dict
+            yield BPListSize(self, "size")
+            yield BPListDict(self, "value", self['size'].value)
+            self.xml=lambda prefix:self['value'].createXML(prefix)
+
+        else:
+            # Unknown marker: consume the low nibble and move on.
+            yield Bits(self, "value", 4)
+            self.xml=lambda prefix:''
+
+    def createValue(self):
+        if 'value' in self:
+            return self['value'].value
+        elif self['marker_type'].value in [4,5,6]:
+            # Empty data/string objects have no "value" field at all.
+            return u''
+        else:
+            return None
+
+    def createDisplay(self):
+        if 'value' in self:
+            return unicode(self['value'].display)
+        elif self['marker_type'].value in [4,5,6]:
+            return u''
+        else:
+            return None
+
+    def createXML(self, prefix=''):
+        if 'value' in self:
+            try:
+                return self.xml(prefix)
+            except AttributeError:
+                # self.xml may be unset (e.g. empty array branch).
+                return ''
+        return ''
+
+    def getFieldType(self):
+        return '%s<%s>'%(FieldSet.getFieldType(self), self['marker_type'].display)
+
+class BPList(HachoirParser, RootSeekableFieldSet):
+    endian = BIG_ENDIAN
+    MAGIC = "bplist00"
+    PARSER_TAGS = {
+        "id": "bplist",
+        "category": "misc",
+        "file_ext": ("plist",),
+        "magic": ((MAGIC, 0),),
+        "min_size": 8 + 32, # bplist00 + 32-byte trailer
+        "description": "Apple/NeXT Binary Property List",
+    }
+
+    def __init__(self, stream, **args):
+        RootSeekableFieldSet.__init__(self, None, "root", stream, None, stream.askSize(self))
+        HachoirParser.__init__(self, stream, **args)
+
+    def validate(self):
+        if self.stream.readBytes(0, len(self.MAGIC)) != self.MAGIC:
+            return "Invalid magic"
+        return True
+
+    def createFields(self):
+        yield Bytes(self, "magic", 8, "File magic (bplist00)")
+        if self.size:
+            self.seekByte(self.size//8-32, True)
+        else:
+            # FIXME: UNTESTED
+            while True:
+                try:
+                    self.seekByte(1024)
+                except:
+                    break
+            self.seekByte(self.size//8-32)
+        yield BPListTrailer(self, "trailer")
+        self.seekByte(self['trailer/offsetTableOffset'].value)
+        yield BPListOffsetTable(self, "offset_table")
+        for i in self.array("offset_table/offset"):
+            if self.current_size > i.value*8:
+                self.seekByte(i.value)
+            elif self.current_size < i.value*8:
+                # try to detect files with gaps or unparsed content
+                yield RawBytes(self, "padding[]", i.value-self.current_size//8)
+            yield BPListObject(self, "object[]")
+
+    def createXML(self, prefix=''):
+        return '''<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+''' + self['/object[' + str(self['/trailer/topObject'].value) + ']'].createXML(prefix) + '''
+</plist>'''
+
diff --git a/lib/hachoir_parser/misc/chm.py b/lib/hachoir_parser/misc/chm.py
new file mode 100644
index 0000000000000000000000000000000000000000..6bff555098b99696b1f60495873ab8932b60e776
--- /dev/null
+++ b/lib/hachoir_parser/misc/chm.py
@@ -0,0 +1,200 @@
+"""
+InfoTech Storage Format (ITSF) parser, used by Microsoft's HTML Help (.chm)
+
+Document:
+- Microsoft's HTML Help (.chm) format
+  http://www.wotsit.org (search "chm")
+- chmlib library
+  http://www.jedrea.com/chmlib/
+
+Author: Victor Stinner
+Creation date: 2007-03-04
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (Field, FieldSet, ParserError,
+    Int32, UInt32, UInt64,
+    RawBytes, PaddingBytes,
+    Enum, String)
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_parser.common.win32 import GUID
+from hachoir_parser.common.win32_lang_id import LANGUAGE_ID
+from hachoir_core.text_handler import textHandler, hexadecimal, filesizeHandler
+
+class CWord(Field):
+    """
+    Compressed double-word
+    """
+    def __init__(self, parent, name, description=None):
+        Field.__init__(self, parent, name, 8, description)
+
+        endian = self._parent.endian
+        stream = self._parent.stream
+        addr = self.absolute_address
+
+        value = 0
+        byte = stream.readBits(addr, 8, endian)
+        while byte & 0x80:
+            value <<= 7
+            value += (byte & 0x7f)
+            self._size += 8
+            if 64 < self._size:
+                raise ParserError("CHM: CWord is limited to 64 bits")
+            addr += 8
+            byte = stream.readBits(addr, 8, endian)
+        value += byte
+        self.createValue = lambda: value
+
+class Filesize_Header(FieldSet):
+    """File-size section pointed to by the ITSF header: mostly constants
+    plus the total file size in bytes."""
+    def createFields(self):
+        yield textHandler(UInt32(self, "unknown[]", "0x01FE"), hexadecimal)
+        yield textHandler(UInt32(self, "unknown[]", "0x0"), hexadecimal)
+        yield filesizeHandler(UInt64(self, "file_size"))
+        yield textHandler(UInt32(self, "unknown[]", "0x0"), hexadecimal)
+        yield textHandler(UInt32(self, "unknown[]", "0x0"), hexadecimal)
+
+class ITSP(FieldSet):
+    """
+    ITSP directory header of a CHM file.
+
+    The field set's own size is taken from its "size" field (in bytes),
+    which forces that field to be parsed up front in __init__.
+    """
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+        # Header length is self-describing; convert bytes to bits.
+        self._size = self["size"].value * 8
+
+    def createFields(self):
+        yield String(self, "magic", 4, "ITSP", charset="ASCII")
+        yield UInt32(self, "version", "Version (=1)")
+        yield filesizeHandler(UInt32(self, "size", "Length (in bytes) of the directory header (84)"))
+        yield UInt32(self, "unknown[]", "(=10)")
+        yield filesizeHandler(UInt32(self, "block_size", "Directory block size"))
+        yield UInt32(self, "density", "Density of quickref section, usually 2")
+        yield UInt32(self, "index_depth", "Depth of the index tree")
+        yield Int32(self, "nb_dir", "Chunk number of root index chunk")
+        yield UInt32(self, "first_pmgl", "Chunk number of first PMGL (listing) chunk")
+        yield UInt32(self, "last_pmgl", "Chunk number of last PMGL (listing) chunk")
+        yield Int32(self, "unknown[]", "-1")
+        yield UInt32(self, "nb_dir_chunk", "Number of directory chunks (total)")
+        yield Enum(UInt32(self, "lang_id", "Windows language ID"), LANGUAGE_ID)
+        yield GUID(self, "system_uuid", "{5D02926A-212E-11D0-9DF9-00A0C922E6EC}")
+        yield filesizeHandler(UInt32(self, "size2", "Same value than size"))
+        yield Int32(self, "unknown[]", "-1")
+        yield Int32(self, "unknown[]", "-1")
+        yield Int32(self, "unknown[]", "-1")
+
+class ITSF(FieldSet):
+    """Top-level ITSF header: identifies the file and locates the
+    file-size section and the directory."""
+    def createFields(self):
+        yield String(self, "magic", 4, "ITSF", charset="ASCII")
+        yield UInt32(self, "version")
+        yield UInt32(self, "header_size", "Total header length (in bytes)")
+        yield UInt32(self, "one")
+        yield UInt32(self, "last_modified")
+        yield Enum(UInt32(self, "lang_id", "Windows Language ID"), LANGUAGE_ID)
+        yield GUID(self, "dir_uuid", "{7C01FD10-7BAA-11D0-9E0C-00A0-C922-E6EC}")
+        yield GUID(self, "stream_uuid", "{7C01FD11-7BAA-11D0-9E0C-00A0-C922-E6EC}")
+        yield UInt64(self, "filesize_offset")
+        yield filesizeHandler(UInt64(self, "filesize_len"))
+        yield UInt64(self, "dir_offset")
+        yield filesizeHandler(UInt64(self, "dir_len"))
+        # Version 3 headers grow an extra content-data offset.
+        if 3 <= self["version"].value:
+            yield UInt64(self, "data_offset")
+
+class PMGL_Entry(FieldSet):
+    """One directory listing entry: name plus content-section locator
+    (all integers stored as compressed CWords)."""
+    def createFields(self):
+        yield CWord(self, "name_len")
+        yield String(self, "name", self["name_len"].value, charset="UTF-8")
+        yield CWord(self, "space")
+        yield CWord(self, "start")
+        yield filesizeHandler(CWord(self, "length"))
+
+    def createDescription(self):
+        return "%s (%s)" % (self["name"].value, self["length"].display)
+
+class PMGL(FieldSet):
+    def createFields(self):
+        # Header
+        yield String(self, "magic", 4, "PMGL", charset="ASCII")
+        yield filesizeHandler(Int32(self, "free_space",
+            "Length of free space and/or quickref area at end of directory chunk"))
+        yield Int32(self, "unknown")
+        yield Int32(self, "previous", "Chunk number of previous listing chunk")
+        yield Int32(self, "next", "Chunk number of previous listing chunk")
+
+        # Entries
+        stop = self.size - self["free_space"].value * 8
+        while self.current_size < stop:
+            yield PMGL_Entry(self, "entry[]")
+
+        # Padding
+        padding = (self.size - self.current_size) // 8
+        if padding:
+            yield PaddingBytes(self, "padding", padding)
+
+class PMGI_Entry(FieldSet):
+    """One index entry: a name and the listing-chunk number holding it."""
+    def createFields(self):
+        yield CWord(self, "name_len")
+        yield String(self, "name", self["name_len"].value, charset="UTF-8")
+        yield CWord(self, "page")
+
+    def createDescription(self):
+        return "%s (page #%u)" % (self["name"].value, self["page"].value)
+
+class PMGI(FieldSet):
+    """PMGI (index) directory chunk: header, index entries, padding."""
+    def createFields(self):
+        yield String(self, "magic", 4, "PMGI", charset="ASCII")
+        yield filesizeHandler(UInt32(self, "free_space",
+            "Length of free space and/or quickref area at end of directory chunk"))
+
+        # Entries fill the chunk up to the free-space/quickref area.
+        stop = self.size - self["free_space"].value * 8
+        while self.current_size < stop:
+            yield PMGI_Entry(self, "entry[]")
+
+        padding = (self.size - self.current_size) // 8
+        if padding:
+            yield PaddingBytes(self, "padding", padding)
+
+class Directory(FieldSet):
+    """CHM directory: ITSP header, PMGL listing chunks, optional PMGI index."""
+    def createFields(self):
+        yield ITSP(self, "itsp")
+        block_size = self["itsp/block_size"].value * 8
+
+        nb_dir = self["itsp/nb_dir"].value
+
+        # nb_dir is -1 when there is no index chunk; still read one PMGL.
+        if nb_dir < 0:
+            nb_dir = 1
+        for index in xrange(nb_dir):
+            yield PMGL(self, "pmgl[]", size=block_size)
+
+        # Remaining space (if any) holds the index chunk.
+        if self.current_size < self.size:
+            yield PMGI(self, "pmgi", size=block_size)
+
+class ChmFile(Parser):
+    """
+    Microsoft HTML Help (.chm) parser: ITSF header, file-size section,
+    directory, then raw content.
+    """
+    PARSER_TAGS = {
+        "id": "chm",
+        "category": "misc",
+        "file_ext": ("chm",),
+        "min_size": 4*8,
+        "magic": (("ITSF\3\0\0\0", 0),),
+        "description": "Microsoft's HTML Help (.chm)",
+    }
+    endian = LITTLE_ENDIAN
+
+    def validate(self):
+        if self.stream.readBytes(0, 4) != "ITSF":
+            return "Invalid magic"
+        # Only ITSF version 3 is supported.
+        if self["itsf/version"].value != 3:
+            return "Invalid version"
+        return True
+
+    def createFields(self):
+        yield ITSF(self, "itsf")
+        yield Filesize_Header(self, "file_size", size=self["itsf/filesize_len"].value*8)
+
+        # Skip to the directory declared in the ITSF header.
+        padding = self.seekByte(self["itsf/dir_offset"].value)
+        if padding:
+            yield padding
+        yield Directory(self, "dir", size=self["itsf/dir_len"].value*8)
+
+        # Everything after the directory is undissected content.
+        size = (self.size - self.current_size) // 8
+        if size:
+            yield RawBytes(self, "raw_end", size)
+
+    def createContentSize(self):
+        # Declared file size (bytes) converted to bits.
+        return self["file_size/file_size"].value * 8
+
diff --git a/lib/hachoir_parser/misc/common.py b/lib/hachoir_parser/misc/common.py
new file mode 100644
index 0000000000000000000000000000000000000000..38d9f823706bd1c72aa120cfc5d470110045ebff
--- /dev/null
+++ b/lib/hachoir_parser/misc/common.py
@@ -0,0 +1,13 @@
+from hachoir_core.field import StaticFieldSet, Float32
+
+class Vertex(StaticFieldSet):
+    """3D point stored as three 32-bit floats (x, y, z)."""
+    format = ((Float32, "x"), (Float32, "y"), (Float32, "z"))
+
+    def createValue(self):
+        return (self["x"].value, self["y"].value, self["z"].value)
+
+class MapUV(StaticFieldSet):
+    """Texture-mapping coordinate stored as two 32-bit floats (u, v)."""
+    format = ((Float32, "u"), (Float32, "v"))
+
+    def createValue(self):
+        return (self["u"].value, self["v"].value)
diff --git a/lib/hachoir_parser/misc/file_3do.py b/lib/hachoir_parser/misc/file_3do.py
new file mode 100644
index 0000000000000000000000000000000000000000..3108d0ae076e52070517f22ff8433d0c24bba06d
--- /dev/null
+++ b/lib/hachoir_parser/misc/file_3do.py
@@ -0,0 +1,214 @@
+# -*- coding: utf-8 -*-
+
+"""
+3do model parser.
+
+Author: Cyril Zorin
+Creation date: 28 september 2006
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    UInt32, Int32, String, Float32,
+    RawBytes, PaddingBytes)
+from hachoir_core.endian import LITTLE_ENDIAN, BIG_ENDIAN
+from hachoir_parser.misc.common import Vertex, MapUV
+
+class Vector(FieldSet):
+    """
+    Homogeneous array of `count` fields of class `type`, each named
+    `ename[]`. The total size is precomputed when the element type has
+    a static size, so the field set can be skipped without parsing.
+    """
+    def __init__(self, parent, name,
+    count, type, ename, edesc=None, description=None):
+        FieldSet.__init__(self, parent, name, description)
+        self.count = count
+        self.type = type
+        self.ename = ename+"[]"
+        self.edesc = edesc
+        try:
+            # static_size may be a callable (parametrized) or a plain int.
+            item_size = self.type.static_size(self.ename, self.edesc)
+        except TypeError:
+            item_size = self.type.static_size
+        if item_size:
+            self._size = item_size * self.count
+
+    def createFields(self):
+        for index in xrange(self.count):
+            yield self.type(self, self.ename, self.edesc)
+
+class Face(FieldSet):
+    """One polygon face: render modes, normal, and index lists whose
+    presence depends on the has_texture/has_material flags."""
+    def createFields(self):
+        yield UInt32(self, "id")
+        yield UInt32(self, "type")
+        yield UInt32(self, "geometry_mode")
+        yield UInt32(self, "lighting_mode")
+        yield UInt32(self, "texture_mode")
+        yield UInt32(self, "nvertices")
+        yield Float32(self, "unknown[]", "unknown")
+        yield UInt32(self, "has_texture", "Has texture?")
+        yield UInt32(self, "has_material", "Has material?")
+        yield Vertex(self, "unknown[]")
+        yield Float32(self, "extra_light")
+        yield Vertex(self, "unknown[]")
+        yield Vertex(self, "normal")
+        # Optional index arrays, sized by nvertices.
+        if self["nvertices"].value:
+            yield Vector(self, "vertex_indices",
+                self["nvertices"].value, UInt32, "vertex")
+        if self["has_texture"].value:
+            yield Vector(self, "texture_vertex_indices",
+                self["nvertices"].value, UInt32, "texture_vertex")
+        if self["has_material"].value:
+            yield UInt32(self, "material_index", "material index")
+
+    def createDescription(self):
+        return "Face: id=%s" % self["id"].value
+
+class Mesh(FieldSet):
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+
+    def createFields(self):
+        yield String(self, "name", 32, strip="\0")
+        yield UInt32(self, "id")
+        yield UInt32(self, "geometry_mode")
+        yield UInt32(self, "lighting_mode")
+        yield UInt32(self, "texture_mode")
+        yield UInt32(self, "nmesh_vertices")
+        yield UInt32(self, "ntexture_vertices")
+        yield UInt32(self, "nfaces")
+
+        nb_vert = self["nmesh_vertices"].value
+        if nb_vert:
+            yield Vector(self, "vertices",
+                nb_vert, Vertex, "vertex")
+        if self["ntexture_vertices"].value:
+            yield Vector(self, "texture vertices",
+                self["ntexture_vertices"].value, MapUV, "texture_vertex")
+        if nb_vert:
+            yield Vector(self, "light vertices",
+                nb_vert, Float32, "extra_light")
+            yield Vector(self, "unknown[]",
+                nb_vert, Float32, "unknown")
+        if self["nfaces"].value:
+            yield Vector(self, "faces", self["nfaces"].value, Face, "face")
+        if nb_vert:
+            yield Vector(self, "vertex normals",
+                nb_vert, Vertex, "normal")
+
+        yield UInt32(self, "has_shadow")
+        yield Float32(self, "unknown[]")
+        yield Float32(self, "radius")
+        yield Vertex(self, "unknown[]")
+        yield Vertex(self, "unknown[]")
+
+    def createDescription(self):
+        return 'Mesh "%s" (id %s)' % (self["name"].value, self["id"].value)
+
+class Geoset(FieldSet):
+    """A counted set of meshes."""
+    def createFields(self):
+        yield UInt32(self, "count")
+        for index in xrange(self["count"].value):
+            yield Mesh(self, "mesh[]")
+
+    def createDescription(self):
+        return "Set of %s meshes" % self["count"].value
+
+class Node(FieldSet):
+    """
+    Scene-graph node. The base record is 188-4 bytes; each non-zero link
+    offset (parent / first child / next sibling) appends one extra
+    UInt32 id, so the total size is computed up front in __init__.
+    (Reading self["..."] there triggers parsing of those fields.)
+    """
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+        size = (188-4)*8
+        if self["parent_offset"].value != 0:
+            size += 32
+        if self["first_child_offset"].value != 0:
+            size += 32
+        if self["next_sibling_offset"].value != 0:
+            size += 32
+        self._size = size
+
+    def createFields(self):
+        yield String(self, "name", 32, strip="\0")
+        yield PaddingBytes(self, "unknown[]", 32, pattern="\xCC")
+        yield UInt32(self, "flags")
+        yield UInt32(self, "id")
+        yield UInt32(self, "type")
+        yield Int32(self, "mesh_id")
+        yield UInt32(self, "depth")
+        yield Int32(self, "parent_offset")
+        yield UInt32(self, "nchildren")
+        yield UInt32(self, "first_child_offset")
+        yield UInt32(self, "next_sibling_offset")
+        yield Vertex(self, "pivot")
+        yield Vertex(self, "position")
+        yield Float32(self, "pitch")
+        yield Float32(self, "yaw")
+        yield Float32(self, "roll")
+        for index in xrange(4):
+            yield Vertex(self, "unknown_vertex[]")
+        # Link ids are present only when the matching offset is non-zero.
+        if self["parent_offset"].value != 0:
+            yield UInt32(self, "parent_id")
+        if self["first_child_offset"].value != 0:
+            yield UInt32(self, "first_child_id")
+        if self["next_sibling_offset"].value != 0:
+            yield UInt32(self, "next_sibling_id")
+
+    def createDescription(self):
+        return 'Node "%s"' % self["name"].value
+
+class Nodes(FieldSet):
+    """A counted list of scene-graph nodes."""
+    def createFields(self):
+        yield UInt32(self, "count")
+        for index in xrange(self["count"].value):
+            yield Node(self, "node[]")
+
+    def createDescription(self):
+        return 'Nodes (%s)' % self["count"].value
+
+class Materials(FieldSet):
+    """Counted list of 32-byte material file names; total size is
+    precomputed from the count field."""
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+        # count.size (bits) + count fixed 32-byte names.
+        count = self["count"]
+        self._size = count.size + count.value * (32*8)
+
+    def createFields(self):
+        yield UInt32(self, "count")
+        for index in xrange(self["count"].value):
+            yield String(self, "filename[]", 32, "Material file name", strip="\0")
+
+    def createDescription(self):
+        return 'Material file names (%s)' % self["count"].value
+
+class File3do(Parser):
+    """
+    Renderdroid 3D model (.3do) parser. The 4-byte signature is either
+    "MODL" (big endian) or "LDOM" (little endian); the endianness is
+    switched accordingly before parsing the body.
+    """
+    PARSER_TAGS = {
+        "id": "3do",
+        "category": "misc",
+        "file_ext": ("3do",),
+        "mime": (u"image/x-3do",),
+        "min_size": 8*4,
+        "description": "renderdroid 3d model."
+    }
+
+    endian = LITTLE_ENDIAN
+
+    def validate(self):
+        signature = self.stream.readBytes(0, 4)
+        return signature in ('LDOM', 'MODL') # lazy endian-safe hack =D
+
+    def createFields(self):
+        # Read file signature, and fix endian if needed
+        yield String(self, "file_sig", 4, "File signature", charset="ASCII")
+        if self["file_sig"].value == "MODL":
+            self.endian = BIG_ENDIAN
+
+        # Read file content
+        yield Materials(self, "materials")
+        yield String(self, "model_name", 32, "model file name", strip="\0")
+        yield RawBytes(self, "unknown[]", 4)
+        yield UInt32(self, "ngeosets")
+        for index in xrange(self["ngeosets"].value):
+            yield Geoset(self, "geoset[]")
+        yield RawBytes(self, "unknown[]", 4)
+        yield Nodes(self, "nodes")
+        yield Float32(self, "model_radius")
+        yield Vertex(self, "insertion_offset")
+
+        # Read the end of the file
+        if self.current_size < self._size:
+            yield self.seekBit(self._size, "end")
+
diff --git a/lib/hachoir_parser/misc/file_3ds.py b/lib/hachoir_parser/misc/file_3ds.py
new file mode 100644
index 0000000000000000000000000000000000000000..aaf4fbf491afd10a02cd94d24932f4fb34945d66
--- /dev/null
+++ b/lib/hachoir_parser/misc/file_3ds.py
@@ -0,0 +1,177 @@
+"""
+3D Studio Max file (.3ds) parser.
+Author: Victor Stinner
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (StaticFieldSet, FieldSet,
+    UInt16, UInt32, RawBytes, Enum, CString)
+from hachoir_parser.image.common import RGB
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal
+from hachoir_parser.misc.common import Vertex, MapUV
+
+def readObject(parent):
+    yield CString(parent, "name", "Object name")
+    size = parent["size"].value * 8
+    while parent.current_size < size:
+        yield Chunk(parent, "chunk[]")
+
+def readTextureFilename(parent):
+    # Texture chunk payload is a single NUL-terminated filename.
+    yield CString(parent, "filename", "Texture filename")
+
+def readVersion(parent):
+    # Version chunk payload is a single 32-bit format version number.
+    yield UInt32(parent, "version", "3DS file format version")
+
+def readMaterialName(parent):
+    # Material-name chunk payload is a single NUL-terminated name.
+    yield CString(parent, "name", "Material name")
+
+class Polygon(StaticFieldSet):
+    # Triangle face: three vertex indices plus an edge/visibility flag word.
+    format = (
+        (UInt16, "a", "Vertex A"),
+        (UInt16, "b", "Vertex B"),
+        (UInt16, "c", "Vertex C"),
+        (UInt16, "flags", "Flags"))
+
+def readMapList(parent):
+    yield UInt16(parent, "count", "Map count")
+    for index in xrange(parent["count"].value):
+        yield MapUV(parent, "map_uv[]", "Mapping UV")
+
+def readColor(parent):
+    # Color chunk payload is a single RGB triple.
+    yield RGB(parent, "color")
+
+def readVertexList(parent):
+    yield UInt16(parent, "count", "Vertex count")
+    for index in range(0, parent["count"].value):
+        yield Vertex(parent, "vertex[]", "Vertex")
+
+def readPolygonList(parent):
+    count = UInt16(parent, "count", "Vertex count")
+    yield count
+    for i in range(0, count.value):
+        yield Polygon(parent, "polygon[]")
+    size = parent["size"].value * 8
+    while parent.current_size < size:
+        yield Chunk(parent, "chunk[]")
+
+class Chunk(FieldSet):
+    """One 3DS chunk: a 16-bit type, a 32-bit byte size, then either
+    nested sub-chunks, a type-specific payload, or raw bytes.
+
+    Parsing is table-driven: type_name labels known types,
+    chunk_id_by_type picks stable field names, sub_chunks lists
+    container types, and handlers maps types to payload readers.
+    """
+    # List of chunk type name
+    type_name = {
+        0x0011: "Color",
+        0x4D4D: "Main chunk",
+        0x0002: "File version",
+        0x3D3D: "Materials and objects",
+        0x4000: "Object",
+        0x4100: "Mesh (triangular)",
+        0x4110: "Vertices list",
+        0x4120: "Polygon (faces) list",
+        0x4140: "Map UV list",
+        0x4130: "Object material",
+        0xAFFF: "New material",
+        0xA000: "Material name",
+        0xA010: "Material ambient",
+        0xA020: "Material diffuse",
+        0xA030: "Texture specular",
+        0xA200: "Texture",
+        0xA300: "Texture filename",
+
+        # Key frames
+        0xB000: "Keyframes",
+        0xB002: "Object node tag",
+        0xB006: "Light target node tag",
+        0xB007: "Spot light node tag",
+        0xB00A: "Keyframes header",
+        0xB009: "Keyframe current time",
+        0xB030: "Node identifier",
+        0xB010: "Node header",
+        0x7001: "Viewport layout"
+    }
+
+    # Stable hachoir field names for well-known chunk types.
+    chunk_id_by_type = {
+        0x4d4d: "main",
+        0x0002: "version",
+        0x3d3d: "obj_mat",
+        0xb000: "keyframes",
+        0xafff: "material[]",
+        0x4000: "object[]",
+        0x4110: "vertices_list",
+        0x4120: "polygon_list",
+        0x4140: "mapuv_list",
+        0x4100: "mesh"
+    }
+
+    # List of chunks which contains other chunks
+    sub_chunks = \
+        (0x4D4D, 0x4100, 0x3D3D, 0xAFFF, 0xA200,
+         0xB002, 0xB006, 0xB007,
+         0xA010, 0xA030, 0xA020, 0xB000)
+
+    # List of chunk type handlers
+    handlers = {
+        0xA000: readMaterialName,
+        0x4000: readObject,
+        0xA300: readTextureFilename,
+        0x0011: readColor,
+        0x0002: readVersion,
+        0x4110: readVertexList,
+        0x4120: readPolygonList,
+        0x4140: readMapList
+    }
+
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+
+        # Set description
+        self._description = "Chunk: %s" % self["type"].display
+
+        # Set name based on type field
+        type = self["type"].value
+        if type in Chunk.chunk_id_by_type:
+            self._name = Chunk.chunk_id_by_type[type]
+        else:
+            self._name = "chunk_%04x" % type
+
+        # Guess chunk size (the "size" field counts bytes, _size is bits)
+        self._size = self["size"].value * 8
+
+    def createFields(self):
+        yield Enum(textHandler(UInt16(self, "type", "Chunk type"), hexadecimal), Chunk.type_name)
+        yield UInt32(self, "size", "Chunk size (in bytes)")
+        # "size" includes the 6-byte chunk header itself.
+        content_size = self["size"].value - 6
+        if content_size == 0:
+            return
+        type = self["type"].value
+        if type in Chunk.sub_chunks:
+            # Container chunk: recurse until the declared size is consumed.
+            while self.current_size < self.size:
+                yield Chunk(self, "chunk[]")
+        else:
+            if type in Chunk.handlers:
+                fields = Chunk.handlers[type] (self)
+                for field in fields:
+                    yield field
+            else:
+                # Unknown leaf chunk: keep its payload as opaque bytes.
+                yield RawBytes(self, "data", content_size)
+
+class File3ds(Parser):
+    """3D Studio Max model (.3ds) parser: the file is a flat sequence of
+    top-level chunks, each of which may nest further chunks."""
+    endian = LITTLE_ENDIAN
+    PARSER_TAGS = {
+        "id": "3ds",
+        "category": "misc",
+        "file_ext": ("3ds",),
+        "mime": (u"image/x-3ds",),
+        "min_size": 16*8,
+        "description": "3D Studio Max model"
+    }
+
+    def validate(self):
+        # "MM" is 0x4D4D little endian: the main chunk type.
+        if self.stream.readBytes(0, 2) != "MM":
+            return "Wrong signature"
+        if self["main/version/version"].value not in (2, 3):
+            return "Unknown format version"
+        return True
+
+    def createFields(self):
+        while not self.eof:
+            yield Chunk(self, "chunk[]")
+
diff --git a/lib/hachoir_parser/misc/gnome_keyring.py b/lib/hachoir_parser/misc/gnome_keyring.py
new file mode 100644
index 0000000000000000000000000000000000000000..0bade36f5ea26fe56747b0048560890df3ed7c70
--- /dev/null
+++ b/lib/hachoir_parser/misc/gnome_keyring.py
@@ -0,0 +1,200 @@
+"""
+Gnome keyring parser.
+
+Sources:
+ - Gnome Keyring source code,
+   function generate_file() in keyrings/gkr-keyring.c,
+
+Author: Victor Stinner
+Creation date: 2008-04-09
+"""
+
+from hachoir_core.tools import paddingSize
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    Bit, NullBits, NullBytes,
+    UInt8, UInt32, String, RawBytes, Enum,
+    TimestampUnix64, CompressedField,
+    SubFile)
+from hachoir_core.endian import BIG_ENDIAN
+
+try:
+    # Prefer hashlib when available (Python >= 2.5).
+    import hashlib
+    def sha256(data):
+        hash = hashlib.new('sha256')
+        hash.update(data)
+        return hash.digest()
+    except ImportError:
+    # Fallback stub: defer the failure until sha256() is actually called,
+    # so the module still imports without hashlib.
+    def sha256(data):
+        raise ImportError("hashlib module is missing")
+
+try:
+    from Crypto.Cipher import AES
+    class DeflateStream:
+        # NOTE(review): the password here is a fixed placeholder ("x"*8),
+        # so this only exercises the decryption pipeline; decrypting a real
+        # keyring needs the user's password -- confirm before trusting output.
+        def __init__(self, stream):
+            hash_iterations = 1234
+            password = "x" * 8
+            salt = "\0" * 8
+            key, iv = generate_key(password, salt, hash_iterations)
+            self.cipher = AES.new(key, AES.MODE_CBC, iv)
+
+        def __call__(self, size, data=None):
+            # Called with data=None to flush; nothing is buffered here.
+            if data is None:
+                return ''
+            return self.cipher.decrypt(data)
+
+    def Deflate(field):
+        # Attach the AES stream decoder to the field.
+        CompressedField(field, DeflateStream)
+        return field
+except ImportError:
+    # Without PyCrypto, leave the field as-is (no decryption attempted).
+    def Deflate(field):
+        return field
+
+class KeyringString(FieldSet):
+    def createFields(self):
+        yield UInt32(self, "length")
+        length = self["length"].value
+        if length == 0xffffffff:
+            return
+        yield String(self, "text", length, charset="UTF-8")
+
+    def createValue(self):
+        if "text" in self:
+            return self["text"].value
+        else:
+            return u''
+
+    def createDescription(self):
+        if "text" in self:
+            return self["text"].value
+        else:
+            return u"(empty string)"
+
+class Attribute(FieldSet):
+    """A named keyring attribute; type 0 holds a string value, type 1 an
+    unsigned 32-bit integer value."""
+    def createFields(self):
+        yield KeyringString(self, "name")
+        yield UInt32(self, "type")
+        type = self["type"].value
+        if type == 0:
+            yield KeyringString(self, "value")
+        elif type == 1:
+            yield UInt32(self, "value")
+        else:
+            # Any other type is unknown: abort parsing of this attribute.
+            raise TypeError("Unknown attribute type (%s)" % type)
+
+    def createDescription(self):
+        return 'Attribute "%s"' % self["name"].value
+
+class ACL(FieldSet):
+    """Access-control entry: allowed-operation mask plus the identity
+    (display name and path) of the application it applies to."""
+    def createFields(self):
+        yield UInt32(self, "types_allowed")
+        yield KeyringString(self, "display_name")
+        yield KeyringString(self, "pathname")
+        yield KeyringString(self, "reserved[]")
+        yield UInt32(self, "reserved[]")
+
+class Item(FieldSet):
+    def createFields(self):
+        yield UInt32(self, "id")
+        yield UInt32(self, "type")
+        yield UInt32(self, "attr_count")
+        for index in xrange(self["attr_count"].value):
+            yield Attribute(self, "attr[]")
+
+    def createDescription(self):
+        return "Item #%s: %s attributes" % (self["id"].value, self["attr_count"].value)
+
+class Items(FieldSet):
+    """Counted list of Item entries."""
+    def createFields(self):
+        yield UInt32(self, "count")
+        for index in xrange(self["count"].value):
+            yield Item(self, "item[]")
+
+class EncryptedItem(FieldSet):
+    """One item from the decrypted payload: secret, timestamps, then its
+    attribute and ACL lists."""
+    def createFields(self):
+        yield KeyringString(self, "display_name")
+        yield KeyringString(self, "secret")
+        yield TimestampUnix64(self, "mtime")
+        yield TimestampUnix64(self, "ctime")
+        yield KeyringString(self, "reserved[]")
+        for index in xrange(4):
+            yield UInt32(self, "reserved[]")
+        yield UInt32(self, "attr_count")
+        for index in xrange(self["attr_count"].value):
+            yield Attribute(self, "attr[]")
+        yield UInt32(self, "acl_count")
+        for index in xrange(self["acl_count"].value):
+            yield ACL(self, "acl[]")
+        # Per-item hash padding was disabled; alignment is handled once at
+        # the end of EncryptedData instead.
+#        size = 8 # paddingSize((self.stream.size - self.current_size) // 8, 16)
+#        if size:
+#            yield NullBytes(self, "hash_padding", size, "16 bytes alignment")
+
+class EncryptedData(Parser):
+    """Decrypted keyring payload: an MD5 digest, a run of items, then
+    padding up to a 16-byte boundary."""
+    PARSER_TAGS = {
+        "id": "gnomeencryptedkeyring",
+        "min_size": 16*8,
+        "description": u"Gnome encrypted keyring",
+    }
+    endian = BIG_ENDIAN
+    def validate(self):
+        # Content is ciphertext-derived; there is no magic to check.
+        return True
+
+    def createFields(self):
+        yield RawBytes(self, "md5", 16)
+        while True:
+            size = (self.size - self.current_size) // 8
+            # NOTE(review): 77 appears to be the minimum serialized item
+            # size -- confirm against gkr-keyring.c.
+            if size < 77:
+                break
+            yield EncryptedItem(self, "item[]")
+        size = paddingSize(self.current_size // 8, 16)
+        if size:
+            yield NullBytes(self, "padding_align", size)
+
+class GnomeKeyring(Parser):
+    """Gnome keyring file parser: header, cleartext item metadata, then an
+    AES-encrypted blob holding the secrets (see EncryptedData)."""
+    MAGIC = "GnomeKeyring\n\r\0\n"
+    PARSER_TAGS = {
+        "id": "gnomekeyring",
+        "category": "misc",
+        "magic": ((MAGIC, 0),),
+        "min_size": 47*8,
+        "description": u"Gnome keyring",
+    }
+    CRYPTO_NAMES = {
+        0: u"AEL",
+    }
+    HASH_NAMES = {
+        0: u"MD5",
+    }
+
+    endian = BIG_ENDIAN
+
+    def validate(self):
+        if self.stream.readBytes(0, len(self.MAGIC)) != self.MAGIC:
+            return u"Invalid magic string"
+        return True
+
+    def createFields(self):
+        yield String(self, "magic", len(self.MAGIC), 'Magic string (%r)' % self.MAGIC, charset="ASCII")
+        yield UInt8(self, "major_version")
+        yield UInt8(self, "minor_version")
+        yield Enum(UInt8(self, "crypto"), self.CRYPTO_NAMES)
+        yield Enum(UInt8(self, "hash"), self.HASH_NAMES)
+        yield KeyringString(self, "keyring_name")
+        yield TimestampUnix64(self, "mtime")
+        yield TimestampUnix64(self, "ctime")
+        yield Bit(self, "lock_on_idle")
+        yield NullBits(self, "reserved[]", 31, "Reserved for future flags")
+        yield UInt32(self, "lock_timeout")
+        yield UInt32(self, "hash_iterations")
+        yield RawBytes(self, "salt", 8)
+        yield NullBytes(self, "reserved[]", 16)
+        yield Items(self, "items")
+        yield UInt32(self, "encrypted_size")
+        # The encrypted blob is exposed as a sub-file, decrypted on the fly
+        # when PyCrypto is available (see Deflate above).
+        yield Deflate(SubFile(self, "encrypted", self["encrypted_size"].value, "AES128 CBC", parser_class=EncryptedData))
+
+def generate_key(password, salt, hash_iterations):
+    sha = sha256(password+salt)
+    for index in xrange(hash_iterations-1):
+        sha = sha256(sha)
+    return sha[:16], sha[16:]
+
diff --git a/lib/hachoir_parser/misc/hlp.py b/lib/hachoir_parser/misc/hlp.py
new file mode 100644
index 0000000000000000000000000000000000000000..167dc7a6be21b27e3619616d821adc7fb8aaf4b6
--- /dev/null
+++ b/lib/hachoir_parser/misc/hlp.py
@@ -0,0 +1,76 @@
+"""
+Microsoft Windows Help (HLP) parser for Hachoir project.
+
+Documents:
+- Windows Help File Format / Annotation File Format / SHG and MRB File Format
+  written by M. Winterhoff (100326.2776@compuserve.com)
+  found on http://www.wotsit.org/
+
+Author: Victor Stinner
+Creation date: 2007-09-03
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    Bits, Int32, UInt16, UInt32,
+    NullBytes, RawBytes, PaddingBytes, String)
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.text_handler import (textHandler, hexadecimal,
+    displayHandler, humanFilesize)
+
+class FileEntry(FieldSet):
+    """One internal file of a HLP archive: allocation header followed by a
+    B+ tree header; sized by its reserved space."""
+    def __init__(self, *args, **kw):
+        FieldSet.__init__(self, *args, **kw)
+        # The entry occupies its full reserved space, not just the used part.
+        self._size = self["res_space"].value * 8
+
+    def createFields(self):
+        yield displayHandler(UInt32(self, "res_space", "Reserved space"), humanFilesize)
+        yield displayHandler(UInt32(self, "used_space", "Used space"), humanFilesize)
+        yield Bits(self, "file_flags", 8, "(=4)")
+
+        # B+ tree header of the internal file.
+        yield textHandler(UInt16(self, "magic"), hexadecimal)
+        yield Bits(self, "flags", 16)
+        yield displayHandler(UInt16(self, "page_size", "Page size in bytes"), humanFilesize)
+        yield String(self, "structure", 16, strip="\0", charset="ASCII")
+        yield NullBytes(self, "zero", 2)
+        yield UInt16(self, "nb_page_splits", "Number of page splits B+ tree has suffered")
+        yield UInt16(self, "root_page", "Page number of B+ tree root page")
+        yield PaddingBytes(self, "one", 2, pattern="\xFF")
+        yield UInt16(self, "nb_page", "Number of B+ tree pages")
+        yield UInt16(self, "nb_level", "Number of levels of B+ tree")
+        yield UInt16(self, "nb_entry", "Number of entries in B+ tree")
+
+        # Pad out to the reserved space.
+        size = (self.size - self.current_size)//8
+        if size:
+            yield PaddingBytes(self, "reserved_space", size)
+
+class HlpFile(Parser):
+    PARSER_TAGS = {
+        "id": "hlp",
+        "category": "misc",
+        "file_ext": ("hlp",),
+        "min_size": 32,
+        "description": "Microsoft Windows Help (HLP)",
+    }
+    endian = LITTLE_ENDIAN
+
+    def validate(self):
+        if self["magic"].value != 0x00035F3F:
+            return "Invalid magic"
+        if self["filesize"].value != self.stream.size//8:
+            return "Invalid magic"
+        return True
+
+    def createFields(self):
+        yield textHandler(UInt32(self, "magic"), hexadecimal)
+        yield UInt32(self, "dir_start", "Directory start")
+        yield Int32(self, "first_free_block", "First free block")
+        yield UInt32(self, "filesize", "File size in bytes")
+
+        yield self.seekByte(self["dir_start"].value)
+        yield FileEntry(self, "file[]")
+
+        size = (self.size - self.current_size)//8
+        if size:
+            yield RawBytes(self, "end", size)
+
diff --git a/lib/hachoir_parser/misc/lnk.py b/lib/hachoir_parser/misc/lnk.py
new file mode 100644
index 0000000000000000000000000000000000000000..6e67bf1b07d06972ad04f212e9055d8f450cbd12
--- /dev/null
+++ b/lib/hachoir_parser/misc/lnk.py
@@ -0,0 +1,582 @@
+"""
+Windows Shortcut (.lnk) parser.
+
+Documents:
+- The Windows Shortcut File Format (document version 1.0)
+  Reverse-engineered by Jesse Hager
+  http://www.i2s-lab.com/Papers/The_Windows_Shortcut_File_Format.pdf
+- Wine source code:
+  http://source.winehq.org/source/include/shlobj.h (SHELL_LINK_DATA_FLAGS enum)
+  http://source.winehq.org/source/dlls/shell32/pidl.h
+- Microsoft:
+  http://msdn2.microsoft.com/en-us/library/ms538128.aspx
+
+Author: Robert Xiao, Victor Stinner
+
+Changes:
+  2007-06-27 - Robert Xiao
+    * Fixes to FileLocationInfo to correctly handle Unicode paths
+  2007-06-13 - Robert Xiao
+    * ItemID, FileLocationInfo and ExtraInfo structs, correct Unicode string handling
+  2007-03-15 - Victor Stinner
+    * Creation of the parser
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    CString, String,
+    UInt32, UInt16, UInt8,
+    Bit, Bits, PaddingBits,
+    TimestampWin64, DateTimeMSDOS32,
+    NullBytes, PaddingBytes, RawBytes, Enum)
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal
+from hachoir_parser.common.win32 import GUID
+from hachoir_parser.common.msdos import MSDOSFileAttr16, MSDOSFileAttr32
+from hachoir_core.text_handler import filesizeHandler
+
+from hachoir_core.tools import paddingSize
+
+class ItemIdList(FieldSet):
+    """Shell item ID list: a 2-byte size then ItemId entries, terminated
+    by an entry whose length field is zero."""
+    def __init__(self, *args, **kw):
+        FieldSet.__init__(self, *args, **kw)
+        # "size" excludes its own two bytes, hence the +2.
+        self._size = (self["size"].value+2) * 8
+
+    def createFields(self):
+        yield UInt16(self, "size", "Size of item ID list")
+        while True:
+            item = ItemId(self, "itemid[]")
+            yield item
+            if not item["length"].value:
+                break
+
+class ItemId(FieldSet):
+    """One shell item ID (PIDL entry): a 16-bit length, a type byte, then
+    a type-specific payload. A zero length marks the end of the list."""
+    ITEM_TYPE = {
+        0x1F: "GUID",
+        0x23: "Drive",
+        0x25: "Drive",
+        0x29: "Drive",
+        0x2E: "GUID",
+        0x2F: "Drive",
+        0x30: "Dir/File",
+        0x31: "Directory",
+        0x32: "File",
+        0x34: "File [Unicode Name]",
+        0x41: "Workgroup",
+        0x42: "Computer",
+        0x46: "Net Provider",
+        0x47: "Whole Network",
+        0x61: "MSITStore",
+        0x70: "Printer/RAS Connection",
+        0xB1: "History/Favorite",
+        0xC3: "Network Share",
+    }
+
+    def __init__(self, *args, **kw):
+        FieldSet.__init__(self, *args, **kw)
+        if self["length"].value:
+            self._size = self["length"].value * 8
+        else:
+            # Terminator entry: just the 16-bit length field.
+            self._size = 16
+
+    def createFields(self):
+        yield UInt16(self, "length", "Length of Item ID Entry")
+        if not self["length"].value:
+            return
+
+        yield Enum(UInt8(self, "type"),self.ITEM_TYPE)
+        entrytype=self["type"].value
+        if entrytype in (0x1F, 0x2E, 0x70):
+            # GUID
+            yield RawBytes(self, "dummy", 1, "should be 0x50")
+            yield GUID(self, "guid")
+
+        elif entrytype in (0x23, 0x25, 0x29, 0x2F):
+            # Drive: the rest of the entry is the drive string.
+            yield String(self, "drive", self["length"].value-3, strip="\0")
+
+        elif entrytype in (0x30, 0x31, 0x32):
+            # File or directory entry.
+            yield RawBytes(self, "dummy", 1, "should be 0x00")
+            yield UInt32(self, "size", "size of file; 0 for folders")
+            yield DateTimeMSDOS32(self, "date_time", "File/folder date and time")
+            yield MSDOSFileAttr16(self, "attribs", "File/folder attributes")
+            yield CString(self, "name", "File/folder name")
+            if self.root.hasUnicodeNames():
+                # Align to 2-bytes
+                n = paddingSize(self.current_size//8, 2)
+                if n:
+                    yield PaddingBytes(self, "pad", n)
+
+                yield UInt16(self, "length_w", "Length of wide struct member")
+                yield RawBytes(self, "unknown[]", 6)
+                yield DateTimeMSDOS32(self, "creation_date_time", "File/folder creation date and time")
+                yield DateTimeMSDOS32(self, "access_date_time", "File/folder last access date and time")
+                yield RawBytes(self, "unknown[]", 4)
+                yield CString(self, "unicode_name", "File/folder name", charset="UTF-16-LE")
+                yield RawBytes(self, "unknown[]", 2)
+            else:
+                yield CString(self, "name_short", "File/folder short name")
+
+        elif entrytype in (0x41, 0x42, 0x46):
+            # Workgroup / computer / net provider entry.
+            yield RawBytes(self, "unknown[]", 2)
+            yield CString(self, "name")
+            yield CString(self, "protocol")
+            yield RawBytes(self, "unknown[]", 2)
+
+        elif entrytype == 0x47:
+            # Whole Network
+            yield RawBytes(self, "unknown[]", 2)
+            yield CString(self, "name")
+
+        elif entrytype == 0xC3:
+            # Network Share
+            yield RawBytes(self, "unknown[]", 2)
+            yield CString(self, "name")
+            yield CString(self, "protocol")
+            yield CString(self, "description")
+            yield RawBytes(self, "unknown[]", 2)
+
+        else:
+            # Unknown type: keep the payload as raw bytes.
+            yield RawBytes(self, "raw", self["length"].value-3)
+
+    def createDescription(self):
+        if self["length"].value:
+            return "Item ID Entry: "+self.ITEM_TYPE.get(self["type"].value,"Unknown")
+        else:
+            return "End of Item ID List"
+
+def formatVolumeSerial(field):
+    val = field.value
+    return '%04X-%04X'%(val>>16, val&0xFFFF)
+
+class LocalVolumeTable(FieldSet):
+    """Local volume description of a shortcut target: volume type, serial
+    number, and the drive label found at label_offset."""
+    VOLUME_TYPE={
+        1: "No root directory",
+        2: "Removable (Floppy, Zip, etc.)",
+        3: "Fixed (Hard disk)",
+        4: "Remote (Network drive)",
+        5: "CD-ROM",
+        6: "Ram drive",
+    }
+
+    def createFields(self):
+        yield UInt32(self, "length", "Length of this structure")
+        yield Enum(UInt32(self, "volume_type", "Volume Type"),self.VOLUME_TYPE)
+        yield textHandler(UInt32(self, "volume_serial", "Volume Serial Number"), formatVolumeSerial)
+
+        # The label lives at an offset relative to this structure.
+        yield UInt32(self, "label_offset", "Offset to volume label")
+        padding = self.seekByte(self["label_offset"].value)
+        if padding:
+            yield padding
+        yield CString(self, "drive")
+
+    def hasValue(self):
+        return bool(self["drive"].value)
+
+    def createValue(self):
+        return self["drive"].value
+
+class NetworkVolumeTable(FieldSet):
+    """Network volume description: the share name found at its offset."""
+    def createFields(self):
+        yield UInt32(self, "length", "Length of this structure")
+        yield UInt32(self, "unknown[]")
+        yield UInt32(self, "share_name_offset", "Offset to share name")
+        yield UInt32(self, "unknown[]")
+        yield UInt32(self, "unknown[]")
+        # Share name lives at an offset relative to this structure.
+        padding = self.seekByte(self["share_name_offset"].value)
+        if padding:
+            yield padding
+        yield CString(self, "share_name")
+
+    def createValue(self):
+        return self["share_name"].value
+
+class FileLocationInfo(FieldSet):
+    """Shortcut target location: flags plus offset-addressed volume tables
+    and pathname strings. Every offset is relative to this structure, so
+    parsing seeks before each sub-field."""
+    def createFields(self):
+        yield UInt32(self, "length", "Length of this structure")
+        if not self["length"].value:
+            return
+
+        yield UInt32(self, "first_offset_pos", "Position of first offset")
+        # A first offset of 0x24 signals the extended (Unicode) layout.
+        has_unicode_paths = (self["first_offset_pos"].value == 0x24)
+        yield Bit(self, "on_local_volume")
+        yield Bit(self, "on_network_volume")
+        yield PaddingBits(self, "reserved[]", 30)
+        yield UInt32(self, "local_info_offset", "Offset to local volume table; only meaningful if on_local_volume = 1")
+        yield UInt32(self, "local_pathname_offset", "Offset to local base pathname; only meaningful if on_local_volume = 1")
+        yield UInt32(self, "remote_info_offset", "Offset to network volume table; only meaningful if on_network_volume = 1")
+        yield UInt32(self, "pathname_offset", "Offset of remaining pathname")
+        if has_unicode_paths:
+            yield UInt32(self, "local_pathname_unicode_offset", "Offset to Unicode version of local base pathname; only meaningful if on_local_volume = 1")
+            yield UInt32(self, "pathname_unicode_offset", "Offset to Unicode version of remaining pathname")
+        if self["on_local_volume"].value:
+            padding = self.seekByte(self["local_info_offset"].value)
+            if padding:
+                yield padding
+            yield LocalVolumeTable(self, "local_volume_table", "Local Volume Table")
+
+            padding = self.seekByte(self["local_pathname_offset"].value)
+            if padding:
+                yield padding
+            yield CString(self, "local_base_pathname", "Local Base Pathname")
+            if has_unicode_paths:
+                padding = self.seekByte(self["local_pathname_unicode_offset"].value)
+                if padding:
+                    yield padding
+                yield CString(self, "local_base_pathname_unicode", "Local Base Pathname in Unicode", charset="UTF-16-LE")
+
+        if self["on_network_volume"].value:
+            padding = self.seekByte(self["remote_info_offset"].value)
+            if padding:
+                yield padding
+            yield NetworkVolumeTable(self, "network_volume_table")
+
+        padding = self.seekByte(self["pathname_offset"].value)
+        if padding:
+            yield padding
+        yield CString(self, "final_pathname", "Final component of the pathname")
+
+        if has_unicode_paths:
+            padding = self.seekByte(self["pathname_unicode_offset"].value)
+            if padding:
+                yield padding
+            yield CString(self, "final_pathname_unicode", "Final component of the pathname in Unicode", charset="UTF-16-LE")
+
+        # Skip any slack up to the declared structure length.
+        padding=self.seekByte(self["length"].value)
+        if padding:
+            yield padding
+
+class LnkString(FieldSet):
+    def createFields(self):
+        yield UInt16(self, "length", "Length of this string")
+        if self.root.hasUnicodeNames():
+            yield String(self, "data", self["length"].value*2, charset="UTF-16-LE")
+        else:
+            yield String(self, "data", self["length"].value, charset="ASCII")
+
+    def createValue(self):
+        return self["data"].value
+
+class ColorRef(FieldSet):
+    ''' COLORREF struct, 0x00bbggrr '''
+    static_size=32
+    def createFields(self):
+        yield UInt8(self, "red", "Red")
+        yield UInt8(self, "green", "Green")
+        yield UInt8(self, "blue", "Blue")
+        yield PaddingBytes(self, "pad", 1, "Padding (must be 0)")
+    def createDescription(self):
+        rgb = self["red"].value, self["green"].value, self["blue"].value
+        return "RGB Color: #%02X%02X%02X" % rgb
+
+class ColorTableIndex(Bits):
+    """Index into the parent's 16-entry "color[]" table; the description
+    resolves the indexed color lazily."""
+    def __init__(self, parent, name, size, description=None):
+        # Pass None as the static description: createDescription() builds
+        # it on demand from the referenced color entry.
+        Bits.__init__(self, parent, name, size, None)
+        self.desc=description
+    def createDescription(self):
+        # NOTE(review): assert is stripped under -O; it only aids debugging.
+        assert hasattr(self, 'parent') and hasattr(self, 'value')
+        return "%s: %s"%(self.desc,
+                         self.parent["color[%i]"%self.value].description)
+
+class ExtraInfo(FieldSet):
+    INFO_TYPE={
+        0xA0000001: "Link Target Information", # EXP_SZ_LINK_SIG
+        0xA0000002: "Console Window Properties", # NT_CONSOLE_PROPS_SIG
+        0xA0000003: "Hostname and Other Stuff",
+        0xA0000004: "Console Codepage Information", # NT_FE_CONSOLE_PROPS_SIG
+        0xA0000005: "Special Folder Info", # EXP_SPECIAL_FOLDER_SIG
+        0xA0000006: "DarwinID (Windows Installer ID) Information", # EXP_DARWIN_ID_SIG
+        0xA0000007: "Custom Icon Details", # EXP_LOGO3_ID_SIG or EXP_SZ_ICON_SIG
+    }
+    SPECIAL_FOLDER = {
+         0: "DESKTOP",
+         1: "INTERNET",
+         2: "PROGRAMS",
+         3: "CONTROLS",
+         4: "PRINTERS",
+         5: "PERSONAL",
+         6: "FAVORITES",
+         7: "STARTUP",
+         8: "RECENT",
+         9: "SENDTO",
+        10: "BITBUCKET",
+        11: "STARTMENU",
+        16: "DESKTOPDIRECTORY",
+        17: "DRIVES",
+        18: "NETWORK",
+        19: "NETHOOD",
+        20: "FONTS",
+        21: "TEMPLATES",
+        22: "COMMON_STARTMENU",
+        23: "COMMON_PROGRAMS",
+        24: "COMMON_STARTUP",
+        25: "COMMON_DESKTOPDIRECTORY",
+        26: "APPDATA",
+        27: "PRINTHOOD",
+        28: "LOCAL_APPDATA",
+        29: "ALTSTARTUP",
+        30: "COMMON_ALTSTARTUP",
+        31: "COMMON_FAVORITES",
+        32: "INTERNET_CACHE",
+        33: "COOKIES",
+        34: "HISTORY",
+        35: "COMMON_APPDATA",
+        36: "WINDOWS",
+        37: "SYSTEM",
+        38: "PROGRAM_FILES",
+        39: "MYPICTURES",
+        40: "PROFILE",
+        41: "SYSTEMX86",
+        42: "PROGRAM_FILESX86",
+        43: "PROGRAM_FILES_COMMON",
+        44: "PROGRAM_FILES_COMMONX86",
+        45: "COMMON_TEMPLATES",
+        46: "COMMON_DOCUMENTS",
+        47: "COMMON_ADMINTOOLS",
+        48: "ADMINTOOLS",
+        49: "CONNECTIONS",
+        53: "COMMON_MUSIC",
+        54: "COMMON_PICTURES",
+        55: "COMMON_VIDEO",
+        56: "RESOURCES",
+        57: "RESOURCES_LOCALIZED",
+        58: "COMMON_OEM_LINKS",
+        59: "CDBURN_AREA",
+        61: "COMPUTERSNEARME",
+    }
+    BOOL_ENUM = {
+        0: "False",
+        1: "True",
+    }
+
+    def __init__(self, *args, **kw):
+        FieldSet.__init__(self, *args, **kw)
+        if self["length"].value:
+            self._size = self["length"].value * 8
+        else:
+            self._size = 32
+
+    def createFields(self):
+        yield UInt32(self, "length", "Length of this structure")
+        if not self["length"].value:
+            return
+
+        yield Enum(textHandler(UInt32(self, "signature", "Signature determining the function of this structure"),hexadecimal),self.INFO_TYPE)
+
+        if self["signature"].value == 0xA0000003:
+            # Hostname and Other Stuff
+            yield UInt32(self, "remaining_length")
+            yield UInt32(self, "unknown[]")
+            yield String(self, "hostname", 16, "Computer hostname on which shortcut was last modified", strip="\0")
+            yield RawBytes(self, "unknown[]", 32)
+            yield RawBytes(self, "unknown[]", 32)
+
+        elif self["signature"].value == 0xA0000005:
+            # Special Folder Info
+            yield Enum(UInt32(self, "special_folder_id", "ID of the special folder"),self.SPECIAL_FOLDER)
+            yield UInt32(self, "offset", "Offset to Item ID entry")
+
+        elif self["signature"].value in (0xA0000001, 0xA0000006, 0xA0000007):
+            if self["signature"].value == 0xA0000001: # Link Target Information
+                object_name="target"
+            elif self["signature"].value == 0xA0000006: # DarwinID (Windows Installer ID) Information
+                object_name="darwinID"
+            else: # Custom Icon Details
+                object_name="icon_path"
+            yield CString(self, object_name, "Data (ASCII format)", charset="ASCII")
+            remaining = self["length"].value - self.current_size/8 - 260*2 # 260*2 = size of next part
+            if remaining:
+                yield RawBytes(self, "slack_space[]", remaining, "Data beyond end of string")
+            yield CString(self, object_name+'_unicode', "Data (Unicode format)", charset="UTF-16-LE", truncate="\0")
+            remaining = self["length"].value - self.current_size/8
+            if remaining:
+                yield RawBytes(self, "slack_space[]", remaining, "Data beyond end of string")
+
+        elif self["signature"].value == 0xA0000002:
+            # Console Window Properties
+            yield ColorTableIndex(self, "color_text", 4, "Screen text color index")
+            yield ColorTableIndex(self, "color_bg", 4, "Screen background color index")
+            yield NullBytes(self, "reserved[]", 1)
+            yield ColorTableIndex(self, "color_popup_text", 4, "Pop-up text color index")
+            yield ColorTableIndex(self, "color_popup_bg", 4, "Pop-up background color index")
+            yield NullBytes(self, "reserved[]", 1)
+            yield UInt16(self, "buffer_width", "Screen buffer width (character cells)")
+            yield UInt16(self, "buffer_height", "Screen buffer height (character cells)")
+            yield UInt16(self, "window_width", "Window width (character cells)")
+            yield UInt16(self, "window_height", "Window height (character cells)")
+            yield UInt16(self, "position_left", "Window distance from left edge (screen coords)")
+            yield UInt16(self, "position_top", "Window distance from top edge (screen coords)")
+            yield UInt32(self, "font_number")
+            yield UInt32(self, "input_buffer_size")
+            yield UInt16(self, "font_width", "Font width in pixels; 0 for a non-raster font")
+            yield UInt16(self, "font_height", "Font height in pixels; equal to the font size for non-raster fonts")
+            yield UInt32(self, "font_family")
+            yield UInt32(self, "font_weight")
+            yield String(self, "font_name_unicode", 64, "Font Name (Unicode format)", charset="UTF-16-LE", truncate="\0")
+            yield UInt32(self, "cursor_size", "Relative size of cursor (% of character size)")
+            yield Enum(UInt32(self, "full_screen", "Run console in full screen?"), self.BOOL_ENUM)
+            yield Enum(UInt32(self, "quick_edit", "Console uses quick-edit feature (using mouse to cut & paste)?"), self.BOOL_ENUM)
+            yield Enum(UInt32(self, "insert_mode", "Console uses insertion mode?"), self.BOOL_ENUM)
+            yield Enum(UInt32(self, "auto_position", "System automatically positions window?"), self.BOOL_ENUM)
+            yield UInt32(self, "history_size", "Size of the history buffer (in lines)")
+            yield UInt32(self, "history_count", "Number of history buffers (each process gets one up to this limit)")
+            yield Enum(UInt32(self, "history_no_dup", "Automatically eliminate duplicate lines in the history buffer?"), self.BOOL_ENUM)
+            for index in xrange(16):
+                yield ColorRef(self, "color[]")
+
+        elif self["signature"].value == 0xA0000004:
+            # Console Codepage Information
+            yield UInt32(self, "codepage", "Console's code page")
+
+        else:
+            yield RawBytes(self, "raw", self["length"].value-self.current_size/8)
+
+    def createDescription(self):
+        if self["length"].value:
+            return "Extra Info Entry: "+self["signature"].display
+        else:
+            return "End of Extra Info"
+
# Map of Windows virtual-key codes to human-readable hot key names.
# Digits (0x30-0x39), letters (0x41-0x5A), numpad digits (0x60-0x69) and
# function keys (0x70-0x87) are handled arithmetically in text_hot_key()
# and therefore do not appear here.
HOT_KEYS = {
    0x00: u'None',
    0x13: u'Pause',
    0x14: u'Caps Lock',
    0x21: u'Page Up',
    0x22: u'Page Down',
    0x23: u'End',
    0x24: u'Home',
    0x25: u'Left',
    0x26: u'Up',
    0x27: u'Right',
    0x28: u'Down',
    0x2d: u'Insert',
    0x2e: u'Delete',
    0x6a: u'Num *',
    0x6b: u'Num +',
    0x6d: u'Num -',
    0x6e: u'Num .',
    0x6f: u'Num /',
    0x90: u'Num Lock',
    0x91: u'Scroll Lock',
    0xba: u';',
    0xbb: u'=',
    0xbc: u',',
    0xbd: u'-',
    0xbe: u'.',
    0xbf: u'/',
    0xc0: u'`',
    0xdb: u'[',
    0xdc: u'\\',
    0xdd: u']',
    0xde: u"'",
}
+
def text_hot_key(field):
    """Return a human-readable unicode name for a shortcut hot key code.

    *field* is any object with a ``value`` attribute holding the Windows
    virtual-key code (the low byte of the .lnk "hot_key" word).
    """
    assert hasattr(field, "value")
    val = field.value
    if 0x30 <= val <= 0x39:
        # '0'..'9': the key code is the ASCII digit itself.
        return unichr(val)
    elif 0x41 <= val <= 0x5A:
        # 'A'..'Z': likewise the ASCII letter.
        return unichr(val)
    elif 0x60 <= val <= 0x69:
        # Numpad digits: 0x60..0x69 map onto '0'..'9'.
        return u'Numpad %c' % unichr(val-0x30)
    elif 0x70 <= val <= 0x87:
        # Function keys F1..F24.
        # Fix: return unicode like every other branch (was a byte str).
        return u'F%i' % (val-0x6F)
    elif val in HOT_KEYS:
        return HOT_KEYS[val]
    # Fix: unicode fallback for consistency with the other return values.
    return unicode(val)
+
class LnkFile(Parser):
    """Parser for Windows Shortcut (.lnk) files.

    Layout: a fixed header (signature, GUID, flag bits, target attributes,
    timestamps, hot key) followed by optional sections whose presence is
    announced by the flag bits, then ExtraInfo blocks until end of file.
    """
    # 20 bytes: 0x0000004C signature followed by the shell-link GUID.
    MAGIC = "\x4C\0\0\0\x01\x14\x02\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00\x46"
    PARSER_TAGS = {
        "id": "lnk",
        "category": "misc",
        "file_ext": ("lnk",),
        "mime": (u"application/x-ms-shortcut",),
        "magic": ((MAGIC, 0),),
        "min_size": len(MAGIC)*8,   # signature + guid = 20 bytes
        "description": "Windows Shortcut (.lnk)",
    }
    endian = LITTLE_ENDIAN

    # Values of the "show_window" field (how the target window is opened).
    SHOW_WINDOW_STATE = {
         0: u"Hide",
         1: u"Show Normal",
         2: u"Show Minimized",
         3: u"Show Maximized",
         4: u"Show Normal, not activated",
         5: u"Show",
         6: u"Minimize",
         7: u"Show Minimized, not activated",
         8: u"Show, not activated",
         9: u"Restore",
        10: u"Show Default",
    }

    def validate(self):
        # Check both the fixed signature and the shell-link class GUID.
        if self["signature"].value != 0x0000004C:
            return "Invalid signature"
        if self["guid"].value != "00021401-0000-0000-C000-000000000046":
            return "Invalid GUID"
        return True

    def hasUnicodeNames(self):
        # True when the optional strings are stored as UTF-16.
        return self["has_unicode_names"].value

    def createFields(self):
        yield UInt32(self, "signature", "Shortcut signature (0x0000004C)")
        yield GUID(self, "guid", "Shortcut GUID (00021401-0000-0000-C000-000000000046)")

        # 32-bit flag word: each bit announces an optional section below.
        yield Bit(self, "has_shell_id", "Is the Item ID List present?")
        yield Bit(self, "target_is_file", "Is a file or a directory?")
        yield Bit(self, "has_description", "Is the Description field present?")
        yield Bit(self, "has_rel_path", "Is the relative path to the target available?")
        yield Bit(self, "has_working_dir", "Is there a working directory?")
        yield Bit(self, "has_cmd_line_args", "Are there any command line arguments?")
        yield Bit(self, "has_custom_icon", "Is there a custom icon?")
        yield Bit(self, "has_unicode_names", "Are Unicode names used?")
        yield Bit(self, "force_no_linkinfo")
        yield Bit(self, "has_exp_sz")
        yield Bit(self, "run_in_separate")
        yield Bit(self, "has_logo3id", "Is LOGO3 ID info present?")
        yield Bit(self, "has_darwinid", "Is the DarwinID info present?")
        yield Bit(self, "runas_user", "Is the target run as another user?")
        yield Bit(self, "has_exp_icon_sz", "Is custom icon information available?")
        yield Bit(self, "no_pidl_alias")
        yield Bit(self, "force_unc_name")
        yield Bit(self, "run_with_shim_layer")
        yield PaddingBits(self, "reserved[]", 14, "Flag bits reserved for future use")

        yield MSDOSFileAttr32(self, "target_attr")

        yield TimestampWin64(self, "creation_time")
        yield TimestampWin64(self, "modification_time")
        yield TimestampWin64(self, "last_access_time")
        yield filesizeHandler(UInt32(self, "target_filesize"))
        yield UInt32(self, "icon_number")
        yield Enum(UInt32(self, "show_window"), self.SHOW_WINDOW_STATE)
        # Low byte is the virtual-key code, rendered by text_hot_key().
        yield textHandler(UInt8(self, "hot_key", "Hot key used for quick access"),text_hot_key)
        yield Bit(self, "hot_key_shift", "Hot key: is Shift used?")
        yield Bit(self, "hot_key_ctrl", "Hot key: is Ctrl used?")
        yield Bit(self, "hot_key_alt", "Hot key: is Alt used?")
        yield PaddingBits(self, "hot_key_reserved", 21, "Hot key: (reserved)")
        yield NullBytes(self, "reserved[]", 8)

        # Optional sections, in the order fixed by the format.
        if self["has_shell_id"].value:
            yield ItemIdList(self, "item_idlist", "Item ID List")
        if self["target_is_file"].value:
            yield FileLocationInfo(self, "file_location_info", "File Location Info")
        if self["has_description"].value:
            yield LnkString(self, "description")
        if self["has_rel_path"].value:
            yield LnkString(self, "relative_path", "Relative path to target")
        if self["has_working_dir"].value:
            yield LnkString(self, "working_dir", "Working directory (dir to start target in)")
        if self["has_cmd_line_args"].value:
            yield LnkString(self, "cmd_line_args", "Command Line Arguments")
        if self["has_custom_icon"].value:
            yield LnkString(self, "custom_icon", "Custom Icon Path")

        # Trailing ExtraInfo blocks run to the end of the file; the last
        # one has length 0 ("End of Extra Info").
        while not self.eof:
            yield ExtraInfo(self, "extra_info[]")
+
diff --git a/lib/hachoir_parser/misc/msoffice.py b/lib/hachoir_parser/misc/msoffice.py
new file mode 100644
index 0000000000000000000000000000000000000000..90ca1ca7add823678877e04a3cbadc7c5106ccb3
--- /dev/null
+++ b/lib/hachoir_parser/misc/msoffice.py
@@ -0,0 +1,131 @@
+"""
+Parsers for the different streams and fragments found in an OLE2 file.
+
+Documents:
+ - goffice source code
+
+Author: Robert Xiao, Victor Stinner
+Creation: 2006-04-23
+"""
+
+from hachoir_parser import HachoirParser
+from hachoir_core.field import FieldSet, RootSeekableFieldSet, RawBytes
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.stream import StringInputStream
+from hachoir_parser.misc.msoffice_summary import SummaryFieldSet, CompObj
+from hachoir_parser.misc.word_doc import WordDocumentFieldSet
+
# Map well-known OLE2 stream names to the short field-name prefixes used
# by OfficeRootEntry. The "\5" byte is part of the actual stream name as
# stored in the OLE2 directory.
PROPERTY_NAME = {
    u"\5DocumentSummaryInformation": "doc_summary",
    u"\5SummaryInformation": "summary",
    u"WordDocument": "word_doc",
}
+
class OfficeRootEntry(HachoirParser, RootSeekableFieldSet):
    """Parse the streams referenced by an OLE2 root entry.

    Walks the OLE2 property (directory) table and, for each entry stored
    in the small-block area, re-parses its content with a dedicated field
    set (summary, document summary, Word document, CompObj) or yields raw
    bytes, merging runs of contiguous small blocks into single fields.
    """
    PARSER_TAGS = {
        "description": "Microsoft Office document subfragments",
    }
    endian = LITTLE_ENDIAN

    def __init__(self, stream, **args):
        RootSeekableFieldSet.__init__(self, None, "root", stream, None, stream.askSize(self))
        HachoirParser.__init__(self, stream, **args)

    def validate(self):
        # Content was already located/validated by the enclosing OLE2 parser.
        return True

    def createFields(self):
        # "prop" instead of "property": don't shadow the builtin.
        for index, prop in enumerate(self.ole2.properties):
            if index == 0:
                # Index 0 is the root entry itself.
                continue
            try:
                name = PROPERTY_NAME[prop["name"].value]
            except LookupError:
                name = prop.name+"content"
            for field in self.parseProperty(index, prop, name):
                yield field

    def parseProperty(self, property_index, property, name_prefix):
        """Yield fields for one directory entry stored in small blocks."""
        ole2 = self.ole2
        if not property["size"].value:
            return
        if property["size"].value >= ole2["header/threshold"].value:
            # At or above the threshold the stream lives in big blocks,
            # which are handled elsewhere.
            return
        name = "%s[]" % name_prefix
        first = None
        previous = None
        size = 0
        start = property["start"].value
        chain = ole2.getChain(start, True)
        blocksize = ole2.ss_size
        desc_format = "Small blocks %s..%s (%s)"
        while True:
            try:
                block = chain.next()
                contiguous = False
                # Fix: compare against None explicitly. Small-block index 0
                # is valid, and a plain truth test ("if not first:") would
                # restart the run on every block of a chain starting at 0.
                if first is None:
                    first = block
                    contiguous = True
                if previous is not None and block == (previous+1):
                    contiguous = True
                if contiguous:
                    previous = block
                    size += blocksize
                    continue
            except StopIteration:
                block = None
            # Flush the current contiguous run as one field.
            self.seekSBlock(first)
            desc = desc_format % (first, previous, previous-first+1)
            size = min(size, property["size"].value*8)
            if name_prefix in ("summary", "doc_summary"):
                yield SummaryFieldSet(self, name, desc, size=size)
            elif name_prefix == "word_doc":
                yield WordDocumentFieldSet(self, name, desc, size=size)
            elif property_index == 1:
                yield CompObj(self, "comp_obj", desc, size=size)
            else:
                yield RawBytes(self, name, size//8, desc)
            if block is None:
                break
            # Start a new run at the discontiguous block.
            first = block
            previous = block
            # NOTE(review): resetting to sector_size (big-block size) in a
            # small-block chain looks suspicious — should probably be
            # ole2.ss_size; confirm against the OLE2 layout before changing.
            size = ole2.sector_size

    def seekSBlock(self, block):
        """Seek to the given small-block index (ss_size bits per block)."""
        self.seekBit(block * self.ole2.ss_size)
+
class FragmentGroup:
    """Collects CustomFragment pieces and rebuilds the complete stream."""

    def __init__(self, parser):
        # Parser class to attach to the reassembled stream's tags.
        self.parser = parser
        self.items = []

    def add(self, item):
        """Register one more fragment (kept in arrival order)."""
        self.items.append(item)

    def createInputStream(self):
        # FIXME: Use lazy stream creation
        data = "".join(item["rawdata"].value for item in self.items)
        # FIXME: Use smarter code to send arguments
        tags = {"class": self.parser, "args": {"ole2": self.items[0].root}}
        return StringInputStream(data, "<fragment group>", tags=tags.iteritems())
+
class CustomFragment(FieldSet):
    """Raw fragment of a larger stream, registered into a FragmentGroup."""

    def __init__(self, parent, name, size, parser, description=None, group=None):
        FieldSet.__init__(self, parent, name, description, size=size)
        # The first fragment creates the group; later fragments join it.
        self.group = group or FragmentGroup(parser)
        self.group.add(self)

    def createFields(self):
        yield RawBytes(self, "rawdata", self.size//8)

    def _createInputStream(self, **args):
        # Expose the reassembled whole stream, not this single fragment.
        return self.group.createInputStream()
+
diff --git a/lib/hachoir_parser/misc/msoffice_summary.py b/lib/hachoir_parser/misc/msoffice_summary.py
new file mode 100644
index 0000000000000000000000000000000000000000..dd3234af12b321e01833b977d5aa50a616526546
--- /dev/null
+++ b/lib/hachoir_parser/misc/msoffice_summary.py
@@ -0,0 +1,377 @@
+"""
+Microsoft Document summaries structures.
+
+Documents
+---------
+
+ - Apache POI (HPSF Internals):
+   http://poi.apache.org/hpsf/internals.html
+"""
+from hachoir_parser import HachoirParser
+from hachoir_core.field import (FieldSet, ParserError,
+    RootSeekableFieldSet, SeekableFieldSet,
+    Bit, Bits, NullBits,
+    UInt8, UInt16, UInt32, TimestampWin64, TimedeltaWin64, Enum,
+    Bytes, RawBytes, NullBytes, String,
+    Int8, Int32, Float32, Float64, PascalString32)
+from hachoir_core.text_handler import textHandler, hexadecimal, filesizeHandler
+from hachoir_core.tools import createDict
+from hachoir_core.endian import LITTLE_ENDIAN, BIG_ENDIAN
+from hachoir_parser.common.win32 import GUID, PascalStringWin32, CODEPAGE_CHARSET
+from hachoir_parser.image.bmp import BmpHeader, parseImageData
+
# Sanity limit: refuse summaries claiming more sections than this.
MAX_SECTION_COUNT = 100

# Values of the property-set header "os_type" field.
OS_MAC = 1
OS_NAME = {
    0: "Windows 16-bit",
    1: "Macintosh",
    2: "Windows 32-bit",
}
+
class OSConfig:
    """Character-set configuration for summary strings, per platform."""

    def __init__(self, big_endian):
        # big_endian is True for Macintosh-authored documents.
        if big_endian:
            self.utf16 = "UTF-16-BE"
            self.charset = "MacRoman"
        else:
            self.utf16 = "UTF-16-LE"
            # FIXME: Don't guess the charset, use ISO-8859-1 or UTF-8
            self.charset = None
+
class PropertyIndex(FieldSet):
    """Index-table entry: (property id, byte offset of its value within the section)."""
    # Property id announcing the codepage used for 8-bit strings.
    TAG_CODEPAGE = 1

    # Ids shared by both property-set kinds.
    COMMON_PROPERTY = {
        0: "Dictionary",
        1: "CodePage",
        0x80000000: "LOCALE_SYSTEM_DEFAULT",
        0x80000003: "CASE_SENSITIVE",
    }

    # Ids of the DocumentSummaryInformation stream.
    DOCUMENT_PROPERTY = {
         2: "Category",
         3: "PresentationFormat",
         4: "NumBytes",
         5: "NumLines",
         6: "NumParagraphs",
         7: "NumSlides",
         8: "NumNotes",
         9: "NumHiddenSlides",
        10: "NumMMClips",
        11: "Scale",
        12: "HeadingPairs",
        13: "DocumentParts",
        14: "Manager",
        15: "Company",
        16: "LinksDirty",
        17: "DocSumInfo_17",
        18: "DocSumInfo_18",
        19: "DocSumInfo_19",
        20: "DocSumInfo_20",
        21: "DocSumInfo_21",
        22: "DocSumInfo_22",
        23: "DocSumInfo_23",
    }
    DOCUMENT_PROPERTY.update(COMMON_PROPERTY)

    # Ids of the SummaryInformation stream.
    COMPONENT_PROPERTY = {
         2: "Title",
         3: "Subject",
         4: "Author",
         5: "Keywords",
         6: "Comments",
         7: "Template",
         8: "LastSavedBy",
         9: "RevisionNumber",
        10: "TotalEditingTime",
        11: "LastPrinted",
        12: "CreateTime",
        13: "LastSavedTime",
        14: "NumPages",
        15: "NumWords",
        16: "NumCharacters",
        17: "Thumbnail",
        18: "AppName",
        19: "Security",
    }
    COMPONENT_PROPERTY.update(COMMON_PROPERTY)

    def createFields(self):
        # "../.." is the summary root; its field-name prefix tells which
        # id->name table applies to this stream.
        if self["../.."].name.startswith("doc_summary"):
            enum = self.DOCUMENT_PROPERTY
        else:
            enum = self.COMPONENT_PROPERTY
        yield Enum(UInt32(self, "id"), enum)
        yield UInt32(self, "offset")

    def createDescription(self):
        return "Property: %s" % self["id"].display
+
class Bool(Int8):
    """Int8 read as a boolean: -1 means True, any other value False."""
    def createValue(self):
        # The format stores booleans as -1 (true) / 0 (false).
        return Int8.createValue(self) == -1
+
class Thumbnail(FieldSet):
    """
    Thumbnail.

    Documents:
    - See Jakarta POI
      http://jakarta.apache.org/poi/hpsf/thumbnails.html
      http://www.penguin-soft.com/penguin/developer/poi/
          org/apache/poi/hpsf/Thumbnail.html#CF_BITMAP
    - How To Extract Thumbnail Images
      http://sparks.discreet.com/knowledgebase/public/
          solutions/ExtractThumbnailImg.htm
    """
    FORMAT_CLIPBOARD = -1
    # Values of the "format" field.
    FORMAT_NAME = {
        -1: "Windows clipboard",
        -2: "Macintosh clipboard",
        -3: "GUID that contains format identifier",
         0: "No data",
         2: "Bitmap",
         3: "Windows metafile format",
         8: "Device Independent Bitmap (DIB)",
        14: "Enhanced Windows metafile",
    }

    # Values of the "dib_format" field (clipboard formats).
    DIB_BMP = 8
    DIB_FORMAT = {
        2: "Bitmap Obsolete (old BMP)",
        3: "Windows metafile format (WMF)",
        8: "Device Independent Bitmap (BMP)",
       14: "Enhanced Windows metafile (EMF)",
    }
    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        # Total size (bytes) comes from the embedded "size" field; reading
        # it here relies on hachoir's on-demand field parsing.
        self._size = self["size"].value * 8

    def createFields(self):
        yield filesizeHandler(UInt32(self, "size"))
        yield Enum(Int32(self, "format"), self.FORMAT_NAME)
        if self["format"].value == self.FORMAT_CLIPBOARD:
            yield Enum(UInt32(self, "dib_format"), self.DIB_FORMAT)
            if self["dib_format"].value == self.DIB_BMP:
                # Only BMP-in-clipboard thumbnails are decoded further.
                yield BmpHeader(self, "bmp_header")
                size = (self.size - self.current_size) // 8
                yield parseImageData(self, "pixels", size, self["bmp_header"])
                return
        # Any other format is kept as an opaque blob.
        size = (self.size - self.current_size) // 8
        if size:
            yield RawBytes(self, "data", size)
+
class PropertyContent(FieldSet):
    """One typed property value: a 32-bit VT_* type word, then the payload."""
    TYPE_LPSTR = 30
    # VT_* code -> (name, hachoir field class or None when unsupported).
    TYPE_INFO = {
        0: ("EMPTY", None),
        1: ("NULL", None),
        2: ("UInt16", UInt16),
        3: ("UInt32", UInt32),
        4: ("Float32", Float32),
        5: ("Float64", Float64),
        6: ("CY", None),
        7: ("DATE", None),
        8: ("BSTR", None),
        9: ("DISPATCH", None),
        10: ("ERROR", None),
        11: ("BOOL", Bool),
        12: ("VARIANT", None),
        13: ("UNKNOWN", None),
        14: ("DECIMAL", None),
        16: ("I1", None),
        17: ("UI1", None),
        18: ("UI2", None),
        19: ("UI4", None),
        20: ("I8", None),
        21: ("UI8", None),
        22: ("INT", None),
        23: ("UINT", None),
        24: ("VOID", None),
        25: ("HRESULT", None),
        26: ("PTR", None),
        27: ("SAFEARRAY", None),
        28: ("CARRAY", None),
        29: ("USERDEFINED", None),
        30: ("LPSTR", PascalString32),
        31: ("LPWSTR", PascalString32),
        64: ("FILETIME", TimestampWin64),
        65: ("BLOB", None),
        66: ("STREAM", None),
        67: ("STORAGE", None),
        68: ("STREAMED_OBJECT", None),
        69: ("STORED_OBJECT", None),
        70: ("BLOB_OBJECT", None),
        71: ("THUMBNAIL", Thumbnail),
        72: ("CLSID", None),
        0x1000: ("Vector", None),
    }
    TYPE_NAME = createDict(TYPE_INFO, 0)

    def createFields(self):
        self.osconfig = self.parent.osconfig
        # 32-bit type word: low 12 bits are the VT_* code, bit 12 flags a
        # vector (array) of values, the rest is padding.
        # (Removed a dead "if True:" wrapper whose else branch — a single
        # 32-bit type field — was unreachable.)
        yield Enum(Bits(self, "type", 12), self.TYPE_NAME)
        yield Bit(self, "is_vector")
        yield NullBits(self, "padding", 32-12-1)
        tag = self["type"].value
        kw = {}
        try:
            handler = self.TYPE_INFO[tag][1]
            if handler == PascalString32:
                # LPSTR uses the (possibly still unknown) 8-bit charset,
                # LPWSTR is always UTF-16.
                osconfig = self.osconfig
                if tag == self.TYPE_LPSTR:
                    kw["charset"] = osconfig.charset
                else:
                    kw["charset"] = osconfig.utf16
            elif handler == TimestampWin64:
                # TotalEditingTime is a duration, not an absolute date.
                if self.description == "TotalEditingTime":
                    handler = TimedeltaWin64
        except LookupError:
            handler = None
        if not handler:
            raise ParserError("OLE2: Unable to parse property of type %s" \
                % self["type"].display)
        if self["is_vector"].value:
            # Vector: element count followed by that many values.
            yield UInt32(self, "count")
            for index in xrange(self["count"].value):
                yield handler(self, "item[]", **kw)
        else:
            yield handler(self, "value", **kw)
            # Expose the scalar payload as this field set's own value.
            self.createValue = lambda: self["value"].value
# VARIANT values nest another PropertyContent; patch the table after the
# class exists (it cannot reference itself inside its own body).
PropertyContent.TYPE_INFO[12] = ("VARIANT", PropertyContent)
+
class SummarySection(SeekableFieldSet):
    """One property-set section: size, index table, then the property values."""
    def __init__(self, *args):
        SeekableFieldSet.__init__(self, *args)
        # Section size (bytes) is stored in its first field; reading it
        # here relies on hachoir's on-demand field parsing.
        self._size = self["size"].value * 8

    def createFields(self):
        self.osconfig = self.parent.osconfig
        yield UInt32(self, "size")
        yield UInt32(self, "property_count")
        for index in xrange(self["property_count"].value):
            yield PropertyIndex(self, "property_index[]")
        for index in xrange(self["property_count"].value):
            findex = self["property_index[%u]" % index]
            # Each value is located by its absolute offset in the section.
            self.seekByte(findex["offset"].value)
            field = PropertyContent(self, "property[]", findex["id"].display)
            yield field
            # Side effect: the CodePage property fixes the charset used to
            # decode the remaining 8-bit strings of this summary.
            if not self.osconfig.charset \
            and findex['id'].value == PropertyIndex.TAG_CODEPAGE:
                codepage = field['value'].value
                if codepage in CODEPAGE_CHARSET:
                    self.osconfig.charset = CODEPAGE_CHARSET[codepage]
                else:
                    self.warning("Unknown codepage: %r" % codepage)
+
class SummaryIndex(FieldSet):
    """Fixed 20-byte entry pointing at one summary section: (format name, offset)."""
    static_size = 20*8
    def createFields(self):
        yield String(self, "name", 16)
        yield UInt32(self, "offset")
+
class BaseSummary:
    """Shared logic of summary streams: header, section index, sections.

    Mixed into SummaryParser (standalone) and SummaryFieldSet (embedded).
    """
    endian = LITTLE_ENDIAN

    def __init__(self):
        # Reading self["endian"] here triggers parsing of the header
        # fields (hachoir resolves fields on demand), so the concrete
        # field-set __init__ must run before this one.
        # NOTE(review): "\xFF\xFE" mapping to BIG_ENDIAN looks inverted
        # relative to the field description below ("0xFF 0xFE for Intel")
        # — confirm against real documents before changing either.
        if self["endian"].value == "\xFF\xFE":
            self.endian = BIG_ENDIAN
        elif self["endian"].value == "\xFE\xFF":
            self.endian = LITTLE_ENDIAN
        else:
            raise ParserError("OLE2: Invalid endian value")
        # Macintosh documents use MacRoman / UTF-16-BE strings.
        self.osconfig = OSConfig(self["os_type"].value == OS_MAC)

    def createFields(self):
        yield Bytes(self, "endian", 2, "Endian (0xFF 0xFE for Intel)")
        yield UInt16(self, "format", "Format (0)")
        yield UInt8(self, "os_version")
        yield UInt8(self, "os_revision")
        yield Enum(UInt16(self, "os_type"), OS_NAME)
        yield GUID(self, "format_id")
        yield UInt32(self, "section_count")
        if MAX_SECTION_COUNT < self["section_count"].value:
            raise ParserError("OLE2: Too much sections (%s)" % self["section_count"].value)

        # First the index table, then the sections it points at.
        section_indexes = []
        for index in xrange(self["section_count"].value):
            section_index = SummaryIndex(self, "section_index[]")
            yield section_index
            section_indexes.append(section_index)

        for section_index in section_indexes:
            self.seekByte(section_index["offset"].value)
            yield SummarySection(self, "section[]")

        size = (self.size - self.current_size) // 8
        if 0 < size:
            yield NullBytes(self, "end_padding", size)
+
class SummaryParser(BaseSummary, HachoirParser, RootSeekableFieldSet):
    """Standalone parser for a summary stream extracted from an OLE2 file."""
    PARSER_TAGS = {
        "description": "Microsoft Office summary",
    }

    def __init__(self, stream, **kw):
        # Order matters: the field-set machinery must exist before
        # BaseSummary.__init__, which already reads parsed fields.
        RootSeekableFieldSet.__init__(self, None, "root", stream, None, stream.askSize(self))
        HachoirParser.__init__(self, stream, **kw)
        BaseSummary.__init__(self)

    def validate(self):
        # The stream was already located by the OLE2 parser; accept as-is.
        return True
+
class SummaryFieldSet(BaseSummary, FieldSet):
    """Summary embedded as a field set inside a parent OLE2 parser."""
    def __init__(self, parent, name, description=None, size=None):
        # FieldSet first: BaseSummary.__init__ reads parsed fields.
        FieldSet.__init__(self, parent, name, description=description, size=size)
        BaseSummary.__init__(self)
+
class CompObj(FieldSet):
    """CompObj stream: OLE object user type and clipboard format names."""
    OS_VERSION = {
        0x0a03: "Windows 3.1",
    }
    def createFields(self):
        # Header
        yield UInt16(self, "version", "Version (=1)")
        yield textHandler(UInt16(self, "endian", "Endian (0xFF 0xFE for Intel)"), hexadecimal)
        yield UInt8(self, "os_version")
        yield UInt8(self, "os_revision")
        yield Enum(UInt16(self, "os_type"), OS_NAME)
        yield Int32(self, "unused", "(=-1)")
        yield GUID(self, "clsid")

        # User type
        yield PascalString32(self, "user_type", strip="\0")

        # Clipboard format: Mac stores a marker plus a 4-char code,
        # Windows a length-prefixed string.
        if self["os_type"].value == OS_MAC:
            yield Int32(self, "unused[]", "(=-2)")
            yield String(self, "clipboard_format", 4)
        else:
            yield PascalString32(self, "clipboard_format", strip="\0")
        # Older streams stop here.
        if self.current_size == self.size:
            return

        #-- OLE 2.01 ---

        # Program ID
        yield PascalString32(self, "prog_id", strip="\0")

        if self["os_type"].value != OS_MAC:
            # Magic number
            yield textHandler(UInt32(self, "magic", "Magic number (0x71B239F4)"), hexadecimal)

            # Unicode version
            yield PascalStringWin32(self, "user_type_unicode", strip="\0")
            yield PascalStringWin32(self, "clipboard_format_unicode", strip="\0")
            yield PascalStringWin32(self, "prog_id_unicode", strip="\0")

        size = (self.size - self.current_size) // 8
        if size:
            yield NullBytes(self, "end_padding", size)
+
diff --git a/lib/hachoir_parser/misc/ole2.py b/lib/hachoir_parser/misc/ole2.py
new file mode 100644
index 0000000000000000000000000000000000000000..112b22b2d8bec1e3f7d8694eccacd6533cb67b12
--- /dev/null
+++ b/lib/hachoir_parser/misc/ole2.py
@@ -0,0 +1,367 @@
+"""
+Microsoft Office documents parser.
+
+Informations:
+* wordole.c of AntiWord program (v0.35)
+  Copyright (C) 1998-2003 A.J. van Os
+  Released under GNU GPL
+  http://www.winfield.demon.nl/
+* File gsf-infile-msole.c of libgsf library (v1.14.0)
+  Copyright (C) 2002-2004 Jody Goldberg (jody@gnome.org)
+  Released under GNU LGPL 2.1
+  http://freshmeat.net/projects/libgsf/
+* PDF from AAF Association
+  Copyright (C) 2004 AAF Association
+  Copyright (C) 1991-2003 Microsoft Corporation
+  http://www.aafassociation.org/html/specs/aafcontainerspec-v1.0.1.pdf
+
+Author: Victor Stinner
+Creation: 2006-04-23
+"""
+
+from hachoir_parser import HachoirParser
+from hachoir_core.field import (
+    FieldSet, ParserError, SeekableFieldSet, RootSeekableFieldSet,
+    UInt8, UInt16, UInt32, UInt64, TimestampWin64, Enum,
+    Bytes, RawBytes, NullBytes, String)
+from hachoir_core.text_handler import filesizeHandler
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_parser.common.win32 import GUID
+from hachoir_parser.misc.msoffice import CustomFragment, OfficeRootEntry, PROPERTY_NAME
+from hachoir_parser.misc.word_doc import WordDocumentParser
+from hachoir_parser.misc.msoffice_summary import SummaryParser
+
# Accepted range (inclusive) for the header's "bb_shift" field; the big
# block (sector) size is 2**bb_shift bytes.
# (The previous comments — "512 bytes" / "64 kB" — did not match 2**6 / 2**14.)
MIN_BIG_BLOCK_LOG2 = 6   # 64 bytes
MAX_BIG_BLOCK_LOG2 = 14  # 16 kB

# Number of DIFAT entries stored inside the file header
NB_DIFAT = 109
+
class SECT(UInt32):
    """32-bit sector index as used in FAT/DIFAT entries.

    The four highest values are reserved sentinels (free entry, chain
    terminator, FAT/DIFAT bookkeeping) rather than real sector numbers.
    """
    UNUSED       = 0xFFFFFFFF   # -1
    END_OF_CHAIN = 0xFFFFFFFE   # -2
    BFAT_SECTOR  = 0xFFFFFFFD   # -3
    DIFAT_SECTOR = 0xFFFFFFFC   # -4
    SPECIALS = set((END_OF_CHAIN, UNUSED, BFAT_SECTOR, DIFAT_SECTOR))

    special_value_name = {
        UNUSED: "unused",
        END_OF_CHAIN: "end of a chain",
        BFAT_SECTOR: "BFAT sector (in a FAT)",
        DIFAT_SECTOR: "DIFAT sector (in a FAT)",
    }

    # NOTE: the redundant __init__ that merely forwarded its arguments to
    # UInt32.__init__ has been removed; UInt32's constructor has the same
    # (parent, name, description=None) signature, so callers are unaffected.

    def createDisplay(self):
        """Show the sentinel's name for special values, the index otherwise."""
        val = self.value
        return SECT.special_value_name.get(val, str(val))
+
class Property(FieldSet):
    """OLE2 directory entry (fixed 128 bytes): stream/storage name, type,
    red-black tree links, and the stream's starting sector and size."""
    TYPE_ROOT = 5
    TYPE_NAME = {
        1: "storage",
        2: "stream",
        3: "ILockBytes",
        4: "IPropertyStorage",
        5: "root"
    }
    DECORATOR_NAME = {
        0: "red",
        1: "black",
    }
    static_size = 128 * 8

    def createFields(self):
        # Peek at the raw name to guess the UTF-16 byte order.
        # (Renamed from "bytes": it shadowed the builtin.)
        raw = self.stream.readBytes(self.absolute_address, 4)
        if raw == "\0R\0\0":
            charset = "UTF-16-BE"
        else:
            charset = "UTF-16-LE"
        yield String(self, "name", 64, charset=charset, truncate="\0")
        yield UInt16(self, "namelen", "Length of the name")
        yield Enum(UInt8(self, "type", "Property type"), self.TYPE_NAME)
        yield Enum(UInt8(self, "decorator", "Decorator"), self.DECORATOR_NAME)
        yield SECT(self, "left")
        yield SECT(self, "right")
        yield SECT(self, "child", "Child node (valid for storage and root types)")
        yield GUID(self, "clsid", "CLSID of this storage (valid for storage and root types)")
        yield NullBytes(self, "flags", 4, "User flags")
        # BUG FIX: missing space in the description ("timestamp(valid")
        yield TimestampWin64(self, "creation", "Creation timestamp (valid for storage and root types)")
        yield TimestampWin64(self, "lastmod", "Modify timestamp (valid for storage and root types)")
        yield SECT(self, "start", "Starting SECT of the stream (valid for stream and root types)")
        # Version-3 files (512-byte sectors) store a 32-bit size plus padding
        if self["/header/bb_shift"].value == 9:
            yield filesizeHandler(UInt32(self, "size", "Size in bytes (valid for stream and root types)"))
            yield NullBytes(self, "padding", 4)
        else:
            yield filesizeHandler(UInt64(self, "size", "Size in bytes (valid for stream and root types)"))

    def createDescription(self):
        """Summarize the entry as its name and size."""
        name = self["name"].display
        size = self["size"].display
        return "Property: %s (%s)" % (name, size)
+
class DIFat(SeekableFieldSet):
    """Double-Indirect FAT: the 109 entries embedded in the header plus
    any additional DIFAT sectors chained after them."""
    def __init__(self, parent, name, db_start, db_count, description=None):
        SeekableFieldSet.__init__(self, parent, name, description)
        # First DIFAT sector index and number of extra DIFAT sectors
        self.start=db_start
        self.count=db_count

    def createFields(self):
        # The first NB_DIFAT entries live inside the file header itself
        for index in xrange(NB_DIFAT):
            yield SECT(self, "index[%u]" % index)

        for index in xrange(self.count):
            # this is relative to real DIFAT start
            self.seekBit(NB_DIFAT * SECT.static_size+self.parent.sector_size*(self.start+index))
            for sect_index in xrange(NB_DIFAT*(index+1),NB_DIFAT*(index+2)):
                yield SECT(self, "index[%u]" % sect_index)
+
class Header(FieldSet):
    """Fixed-size OLE2 file header (follows the 8-byte magic): version,
    block-size exponents and the starting sectors of FAT/mini-FAT/DIFAT."""
    static_size = 68 * 8
    def createFields(self):
        yield GUID(self, "clsid", "16 bytes GUID used by some apps")
        yield UInt16(self, "ver_min", "Minor version")
        # BUG FIX: ver_maj was described as "Minor version"
        yield UInt16(self, "ver_maj", "Major version")
        yield Bytes(self, "endian", 2, "Endian (0xFFFE for Intel)")
        yield UInt16(self, "bb_shift", "Log, base 2, of the big block size")
        yield UInt16(self, "sb_shift", "Log, base 2, of the small block size")
        yield NullBytes(self, "reserved[]", 6, "(reserved)")
        yield UInt32(self, "csectdir", "Number of SECTs in directory chain for 4 KB sectors (version 4)")
        yield UInt32(self, "bb_count", "Number of Big Block Depot blocks")
        yield SECT(self, "bb_start", "Root start block")
        yield NullBytes(self, "transaction", 4, "Signature used for transactions (must be zero)")
        yield UInt32(self, "threshold", "Maximum size for a mini stream (typically 4096 bytes)")
        yield SECT(self, "sb_start", "Small Block Depot start block")
        yield UInt32(self, "sb_count")
        yield SECT(self, "db_start", "First block of DIFAT")
        yield UInt32(self, "db_count", "Number of SECTs in DIFAT")
+
# Size of everything before the first big block, in BITS (the previous
# comment said bytes, but seekBlock() adds this to a seekBit() address):
# 8-byte ole_id (64 bits) + header + the in-header DIFAT entries.
HEADER_SIZE = 64 + Header.static_size + NB_DIFAT * SECT.static_size
+
class SectFat(FieldSet):
    """A FAT sector: an array of SECT entries numbered from ``start``."""

    def __init__(self, parent, name, start, count, description=None):
        # Each SECT entry is 32 bits, hence the total size below.
        FieldSet.__init__(self, parent, name, description, size=count*32)
        self.start = start
        self.count = count

    def createFields(self):
        first = self.start
        for num in xrange(first, first + self.count):
            yield SECT(self, "index[%u]" % num)
+
class OLE2_File(HachoirParser, RootSeekableFieldSet):
    """Microsoft OLE2 (compound document) parser.

    Reads the header, the DIFAT, the big/small FATs and the directory of
    property entries, then exposes each directory entry's stream as
    fragments (raw bytes or sub-parsers for known streams).
    """
    PARSER_TAGS = {
        "id": "ole2",
        "category": "misc",
        "file_ext": (
            "doc", "dot",                # Microsoft Word
            "ppt", "ppz", "pps", "pot",  # Microsoft Powerpoint
            "xls", "xla",                # Microsoft Excel
            "msi",                       # Windows installer
        ),
        "mime": (
            u"application/msword",
            u"application/msexcel",
            u"application/mspowerpoint",
        ),
        "min_size": 512*8,
        "description": "Microsoft Office document",
        "magic": (("\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1", 0),),
    }
    endian = LITTLE_ENDIAN

    def __init__(self, stream, **args):
        RootSeekableFieldSet.__init__(self, None, "root", stream, None, stream.askSize(self))
        HachoirParser.__init__(self, stream, **args)

    def validate(self):
        """Check magic, version, endian marker and block-size sanity."""
        if self["ole_id"].value != "\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1":
            return "Invalid magic"
        if self["header/ver_maj"].value not in (3, 4):
            return "Unknown major version (%s)" % self["header/ver_maj"].value
        if self["header/endian"].value not in ("\xFF\xFE", "\xFE\xFF"):
            return "Unknown endian (%s)" % self["header/endian"].raw_display
        if not(MIN_BIG_BLOCK_LOG2 <= self["header/bb_shift"].value <= MAX_BIG_BLOCK_LOG2):
            return "Invalid (log 2 of) big block size (%s)" % self["header/bb_shift"].value
        if self["header/bb_shift"].value < self["header/sb_shift"].value:
            return "Small block size (log2=%s) is bigger than big block size (log2=%s)!" \
                % (self["header/sb_shift"].value, self["header/bb_shift"].value)
        return True

    def createFields(self):
        """Yield signature, header, DIFAT, FATs and the property directory."""
        # Signature
        yield Bytes(self, "ole_id", 8, "OLE object signature")

        header = Header(self, "header")
        yield header

        # Configure values (sector sizes are stored in bits here)
        self.sector_size = (8 << header["bb_shift"].value)
        self.fat_count = header["bb_count"].value
        self.items_per_bbfat = self.sector_size / SECT.static_size
        self.ss_size = (8 << header["sb_shift"].value)
        self.items_per_ssfat = self.items_per_bbfat

        # Read DIFAT (one level of indirection)
        yield DIFat(self, "difat",  header["db_start"].value, header["db_count"].value, "Double Indirection FAT")

        # Read FAT (one level of indirection)
        for field in self.readBFAT():
            yield field

        # Read SFAT
        for field in self.readSFAT():
            yield field

        # Read properties: follow the directory chain, each sector holds
        # a whole number of 128-byte Property entries
        chain = self.getChain(self["header/bb_start"].value)
        prop_per_sector = self.sector_size // Property.static_size
        self.properties = []
        for block in chain:
            self.seekBlock(block)
            for index in xrange(prop_per_sector):
                property = Property(self, "property[]")
                yield property
                self.properties.append(property)

        # Parse first property
        for index, property in enumerate(self.properties):
            if index == 0:
                name = "root"
            else:
                try:
                    name = PROPERTY_NAME[property["name"].value]
                except LookupError:
                    name = property.name+"content"
            for field in self.parseProperty(property, name):
                yield field

    def parseProperty(self, property, name_prefix):
        """Yield the content fragments of one directory entry's stream.

        Contiguous big blocks are merged into a single fragment; known
        streams (root/summary/word document) get a dedicated sub-parser.
        """
        if not property["size"].value:
            return
        if property.name != "property[0]" \
        and (property["size"].value < self["header/threshold"].value):
            # Field is stored in the ministream, skip it
            return
        name = "%s[]" % name_prefix
        first = None
        previous = None
        size = 0
        fragment_group = None
        chain = self.getChain(property["start"].value)
        while True:
            try:
                # Py2 generator protocol (chain.next())
                block = chain.next()
                contiguous = False
                if not first:
                    first = block
                    contiguous = True
                if previous and block == (previous+1):
                    contiguous = True
                if contiguous:
                    previous = block
                    size += self.sector_size
                    continue
            except StopIteration:
                block = None
            if first is None:
                break
            # Flush the run of contiguous blocks accumulated so far
            self.seekBlock(first)
            desc = "Big blocks %s..%s (%s)" % (first, previous, previous-first+1)
            desc += " of %s bytes" % (self.sector_size // 8)
            if name_prefix in set(("root", "summary", "doc_summary", "word_doc")):
                if name_prefix == "root":
                    parser = OfficeRootEntry
                elif name_prefix == "word_doc":
                    parser = WordDocumentParser
                else:
                    parser = SummaryParser
                field = CustomFragment(self, name, size, parser, desc, fragment_group)
                yield field
                if not fragment_group:
                    fragment_group = field.group
            else:
                yield RawBytes(self, name, size//8, desc)
            if block is None:
                break
            # Start a new run with the non-contiguous block
            first = block
            previous = block
            size = self.sector_size

    def getChain(self, start, use_sfat=False):
        """Iterate block indexes of a FAT chain starting at *start*.

        Raises ParserError on a reserved sentinel index or on a loop.
        """
        if use_sfat:
            fat = self.ss_fat
            items_per_fat = self.items_per_ssfat
            err_prefix = "SFAT chain"
        else:
            fat = self.bb_fat
            items_per_fat = self.items_per_bbfat
            err_prefix = "BFAT chain"
        block = start
        block_set = set()
        previous = block
        while block != SECT.END_OF_CHAIN:
            if block in SECT.SPECIALS:
                raise ParserError("%s: Invalid block index (0x%08x), previous=%s" % (err_prefix, block, previous))
            if block in block_set:
                raise ParserError("%s: Found a loop (%s=>%s)" % (err_prefix, previous, block))
            block_set.add(block)
            yield block
            previous = block
            index = block // items_per_fat
            try:
                block = fat[index]["index[%u]" % block].value
            except LookupError:
                break

    def readBFAT(self):
        """Yield the big-FAT sectors listed by the DIFAT entries."""
        self.bb_fat = []
        start = 0
        count = self.items_per_bbfat
        for index, block in enumerate(self.array("difat/index")):
            block = block.value
            if block == SECT.UNUSED:
                break

            desc = "FAT %u/%u at block %u" % \
                (1+index, self["header/bb_count"].value, block)

            self.seekBlock(block)
            field = SectFat(self, "bbfat[]", start, count, desc)
            yield field
            self.bb_fat.append(field)

            start += count

    def readSFAT(self):
        """Yield the small-FAT sectors by walking sb_start's FAT chain."""
        chain = self.getChain(self["header/sb_start"].value)
        start = 0
        self.ss_fat = []
        count = self.items_per_ssfat
        for index, block in enumerate(chain):
            self.seekBlock(block)
            field = SectFat(self, "sfat[]", \
                start, count, \
                "SFAT %u/%u at block %u" % \
                (1+index, self["header/sb_count"].value, block))
            yield field
            self.ss_fat.append(field)
            start += count

    def createContentSize(self):
        """Estimate total content size from the highest block seen in the FAT."""
        max_block = 0
        for fat in self.array("bbfat"):
            for entry in fat:
                block = entry.value
                if block not in SECT.SPECIALS:
                    max_block = max(block, max_block)
        # NOTE(review): max_block is only ever assigned non-special values
        # (and starts at 0), so this branch looks unreachable — confirm.
        if max_block in SECT.SPECIALS:
            return None
        else:
            return HEADER_SIZE + (max_block+1) * self.sector_size

    def seekBlock(self, block):
        """Seek to the bit address of big block *block* (header skipped)."""
        self.seekBit(HEADER_SIZE + block * self.sector_size)
+
diff --git a/lib/hachoir_parser/misc/pcf.py b/lib/hachoir_parser/misc/pcf.py
new file mode 100644
index 0000000000000000000000000000000000000000..8d116bbafe378dccb8127ea64349ab35bdca0e1c
--- /dev/null
+++ b/lib/hachoir_parser/misc/pcf.py
@@ -0,0 +1,170 @@
+"""
+X11 Portable Compiled Font (pcf) parser.
+
+Documents:
+- Format for X11 pcf bitmap font files
+  http://fontforge.sourceforge.net/pcf-format.html
+  (file is based on the X11 sources)
+
+Author: Victor Stinner
+Creation date: 2007-03-20
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, Enum,
+    UInt8, UInt32, Bytes, RawBytes, NullBytes,
+    Bit, Bits, PaddingBits, CString)
+from hachoir_core.endian import LITTLE_ENDIAN, BIG_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal, filesizeHandler
+from hachoir_core.tools import paddingSize
+
class TOC(FieldSet):
    """One PCF table-of-contents entry: table type, format word, size
    and absolute file offset of the table's content."""
    TYPE_NAME = {
        0x00000001: "Properties",
        0x00000002: "Accelerators",
        0x00000004: "Metrics",
        0x00000008: "Bitmaps",
        0x00000010: "Ink metrics",
        0x00000020: "BDF encodings",
        0x00000040: "SWidths",
        0x00000080: "Glyph names",
        0x00000100: "BDF accelerators",
    }

    FORMAT_NAME = {
        0x00000000: "Default",
        0x00000200: "Ink bounds",
        0x00000100: "Accelerator W ink bounds",
#        0x00000200: "Compressed metrics",
    }

    def createFields(self):
        yield Enum(UInt32(self, "type"), self.TYPE_NAME)
        yield UInt32(self, "format")
        yield filesizeHandler(UInt32(self, "size"))
        yield UInt32(self, "offset")

    def createDescription(self):
        return "%s at %s (%s)" % (
            self["type"].display, self["offset"].value, self["size"].display)
+
class PropertiesFormat(FieldSet):
    """32-bit PCF table "format" word: byte/bit order flags and scan unit."""
    static_size = 32
    # The format word itself is always stored little-endian, whatever
    # byte order it declares for the rest of the table.
    endian = LITTLE_ENDIAN
    def createFields(self):
        yield Bits(self, "reserved[]", 2)
        yield Bit(self, "byte_big_endian")
        yield Bit(self, "bit_big_endian")
        yield Bits(self, "scan_unit", 2)
        yield textHandler(PaddingBits(self, "reserved[]", 26), hexadecimal)
+
class Property(FieldSet):
    """One entry of the PCF "Properties" table: offsets into the table's
    string pool for the property name and value, plus a string flag."""
    def createFields(self):
        yield UInt32(self, "name_offset")
        yield UInt8(self, "is_string")
        yield UInt32(self, "value_offset")

    def createDescription(self):
        # FIXME: Use link or any better way to read name value
        # NOTE(review): assumes the sibling "name[i]" field sits exactly
        # two indexes behind this property in the parent — fragile; confirm.
        name = self["../name[%s]" % (self.index-2)].value
        return "Property %s" % name
+
class GlyphNames(FieldSet):
    """PCF "Glyph names" table: an array of offsets followed by a pool of
    NUL-terminated name strings."""
    def __init__(self, parent, name, toc, description, size=None):
        FieldSet.__init__(self, parent, name, description, size=size)
        self.toc = toc
        # Reading "format" here triggers lazy parsing of the first field;
        # the byte order must be decided before the rest is parsed.
        if self["format/byte_big_endian"].value:
            self.endian = BIG_ENDIAN
        else:
            self.endian = LITTLE_ENDIAN

    def createFields(self):
        yield PropertiesFormat(self, "format")
        yield UInt32(self, "count")
        offsets = []
        for index in xrange(self["count"].value):
            offset = UInt32(self, "offset[]")
            yield offset
            offsets.append(offset.value)
        yield UInt32(self, "total_str_length")
        # Visit the string pool in file order, not glyph order
        offsets.sort()
        offset0 = self.current_size // 8
        for offset in offsets:
            padding = self.seekByte(offset0+offset)
            if padding:
                yield padding
            yield CString(self, "name[]")
        padding = (self.size - self.current_size) // 8
        if padding:
            yield NullBytes(self, "end_padding", padding)
+
class Properties(GlyphNames):
    """PCF "Properties" table: property records followed by a string pool
    holding their names (and values, when is_string is set)."""
    def createFields(self):
        yield PropertiesFormat(self, "format")
        yield UInt32(self, "nb_prop")
        properties = []
        for index in xrange(self["nb_prop"].value):
            property = Property(self, "property[]")
            yield property
            properties.append(property)
        # Records are padded to a 4-byte boundary before the pool
        padding = paddingSize(self.current_size//8, 4)
        if padding:
            yield NullBytes(self, "padding", padding)
        yield UInt32(self, "total_str_length")
        # Read pool strings in file order (sorted by name offset)
        properties.sort(key=lambda entry: entry["name_offset"].value)
        offset0 = self.current_size // 8
        for property in properties:
            padding = self.seekByte(offset0+property["name_offset"].value)
            if padding:
                yield padding
            yield CString(self, "name[]", "Name of %s" % property.name)
            if property["is_string"].value:
                yield CString(self, "value[]", "Value of %s" % property.name)
        padding = (self.size - self.current_size) // 8
        if padding:
            yield NullBytes(self, "end_padding", padding)
+
class PcfFile(Parser):
    """X11 Portable Compiled Font parser: a table of contents followed by
    the tables it references, visited in file-offset order."""
    MAGIC = "\1fcp"
    PARSER_TAGS = {
        "id": "pcf",
        "category": "misc",
        "file_ext": ("pcf",),
        "magic": ((MAGIC, 0),),
        "min_size": 32, # FIXME
        "description": "X11 Portable Compiled Font (pcf)",
    }
    endian = LITTLE_ENDIAN

    def validate(self):
        """Accept only streams beginning with the PCF magic bytes."""
        if self["signature"].value != self.MAGIC:
            return "Invalid signature"
        return True

    def createFields(self):
        # BUG FIX: the description claimed "\1pcf" but the actual magic
        # (see MAGIC and validate()) is "\1fcp"
        yield Bytes(self, "signature", 4, r'File signature ("\1fcp")')
        yield UInt32(self, "nb_toc")
        entries = []
        for index in xrange(self["nb_toc"].value):
            entry = TOC(self, "toc[]")
            yield entry
            entries.append(entry)
        # TOC entries may be listed out of order: walk them by offset
        entries.sort(key=lambda entry: entry["offset"].value)
        for entry in entries:
            size = entry["size"].value
            padding = self.seekByte(entry["offset"].value)
            if padding:
                yield padding
            # Clamp a declared size that overruns the stream
            maxsize = (self.size-self.current_size)//8
            if maxsize < size:
                self.warning("Truncate content of %s to %s bytes (was %s)" % (entry.path, maxsize, size))
                size = maxsize
            if not size:
                continue
            if entry["type"].value == 1:
                yield Properties(self, "properties", entry, "Properties", size=size*8)
            elif entry["type"].value == 128:
                yield GlyphNames(self, "glyph_names", entry, "Glyph names", size=size*8)
            else:
                yield RawBytes(self, "data[]", size, "Content of %s" % entry.path)
+
diff --git a/lib/hachoir_parser/misc/pdf.py b/lib/hachoir_parser/misc/pdf.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69ba85e3ff831fd670aafad3c85e8a5ccf91158
--- /dev/null
+++ b/lib/hachoir_parser/misc/pdf.py
@@ -0,0 +1,442 @@
+"""
+Adobe Portable Document Format (PDF) parser.
+
+Author: Christophe Gisquet <christophe.gisquet@free.fr>
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (
+    Field, FieldSet,
+    ParserError,
+    GenericVector,
+    UInt8, UInt16, UInt32,
+    String,
+    RawBytes)
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal
+
MAGIC = "%PDF-"     # header signature at the start of the document
ENDMAGIC = "%%EOF"  # trailer marker closing the document
+
def getLineEnd(s, pos=None):
    """Return the distance in bytes from *pos* to the nearest CR or LF.

    *pos* defaults to the current byte position of field set *s*.
    Returns None when neither terminator occurs.
    (Fixed: comparisons to None now use ``is``/``is not`` per PEP 8.)
    """
    if pos is None:
        pos = (s.absolute_address+s.current_size)//8
    end = s.stream.searchBytesLength("\x0D", False, 8*pos)
    other_end = s.stream.searchBytesLength("\x0A", False, 8*pos)
    if end is None or (other_end is not None and other_end < end):
        return other_end
    return end
+
+# TODO: rewrite to account for all possible terminations: ' ', '/', '\0XD'
+#       But this probably requires changing *ALL* of the places they are used,
+#       as ' ' is swallowed but not the others
# TODO: rewrite to account for all possible terminations: ' ', '/', '\0XD'
#       But this probably requires changing *ALL* of the places they are used,
#       as ' ' is swallowed but not the others
def getElementEnd(s, limit=' ', offset=0):
    """Return the byte length from the current position of *s* (plus
    *offset* bytes) to the terminator *limit*, or None when absent.

    (Fixed: comparison to None now uses ``is`` per PEP 8.)
    """
    addr = s.absolute_address+s.current_size
    addr += 8*offset
    pos = s.stream.searchBytesLength(limit, True, addr)
    if pos is None:
        #s.info("Can't find '%s' starting at %u" % (limit, addr))
        return None
    return pos
+
class PDFNumber(Field):
    """A PDF number, integer or real: sprintf("%i") or sprintf("%.?f") shapes.

    (Fixed: the docstring used to be a dead string expression placed after
    LIMITS, so it never became the class __doc__.)
    """
    LIMITS = ['[', '/', '\x0D', ']']

    def __init__(self, parent, name, desc=None):
        Field.__init__(self, parent, name, description=desc)
        # Size: distance to the nearest terminator (' ' or any of LIMITS)
        size = getElementEnd(parent)
        for limit in self.LIMITS:
            other_size = getElementEnd(parent, limit)
            if other_size is not None:
                other_size -= 1
                if size is None or other_size < size:
                    size = other_size

        self._size = 8*size

        # Value: a '.' marks a real number, otherwise it's an integer
        val = parent.stream.readBytes(self.absolute_address, size)
        self.info("Number: size=%u value='%s'" % (size, val))
        if '.' in val:
            self.createValue = lambda: float(val)
        else:
            self.createValue = lambda: int(val)
+
class PDFString(Field):
    """
    A string of the shape:
    ( This string \
      uses 3 lines \
      with the CR(LF) inhibited )
    """
    def __init__(self, parent, name, desc=None):
        Field.__init__(self, parent, name, description=desc)
        val = ""
        count = 1
        off = 1
        while not parent.eof:
            char = parent.stream.readBytes(self.absolute_address+8*off, 1)
            # Non-ASCII
            # NOTE(review): '(' and ')' are not alphabetic, so they are
            # swallowed by this branch and the parenthesis counting below
            # appears unreachable — confirm against the PDF string grammar.
            if not char.isalpha() or char == '\\':
                off += 1
                continue
            if char == '(':
                count += 1
            if char == ')':
                count -= 1
            # Parenthesis block = 0 => end of string
            if count == 0:
                off += 1
                break

            # Add it to the string
            val += char

        self._size = 8*off
        self.createValue = lambda: val
+
class PDFName(Field):
    """String starting with '/', where characters may be written using
    their ASCII code (example: '#20' would be ' ').
    ' ', ']' and NUL are supposed not to be part of the name.

    (Fixed: the docstring used to be a dead string expression placed
    after LIMITS, so it never became the class __doc__.)
    """
    LIMITS = ['[', '/', '<', ']']

    def __init__(self, parent, name, desc=None):
        Field.__init__(self, parent, name, description=desc)
        if parent.stream.readBytes(self.absolute_address, 1) != '/':
            raise ParserError("Unknown PDFName '%s'" %
                              parent.stream.readBytes(self.absolute_address, 10))
        # Size: up to the nearest of ' ' or any LIMITS character
        size = getElementEnd(parent, offset=1)
        for limit in self.LIMITS:
            other_size = getElementEnd(parent, limit, 1)
            if other_size is not None:
                other_size -= 1
                if size is None or other_size < size:
                    size = other_size

        self._size = 8*(size+1)
        # Value should be without the initial '/' and final ' '
        self.createValue = lambda: parent.stream.readBytes(self.absolute_address+8, size).strip(' ')
+
class PDFID(Field):
    """
    Not described as an object, but let's do as it was.
    This ID has the shape <hexadecimal ASCII string>
    """
    def __init__(self, parent, name, desc=None):
        Field.__init__(self, parent, name, description=desc)
        # Size: everything up to the closing '>'
        self._size = 8*getElementEnd(parent, '>')
        # Value skips the first byte (the '<') and drops the trailing '>'
        self.createValue = lambda: parent.stream.readBytes(self.absolute_address+8, (self._size//8)-1)
+
class NotABool(Exception):
    """Raised when the bytes at the current position do not spell a PDF boolean."""
    pass
class PDFBool(Field):
    """
    "true" or "false" string standing for the boolean value
    """
    def __init__(self, parent, name, desc=None):
        """Parse a literal "true"/"false"; raise NotABool on anything else."""
        Field.__init__(self, parent, name, description=desc)
        # BUG FIX: the original fell through to `raise NotABool`
        # unconditionally, discarding a successful parse; also, _size is
        # expressed in bits (as in the sibling PDF field classes), so the
        # byte counts are multiplied by 8.
        if parent.stream.readBytes(self.absolute_address, 4) == "true":
            self._size = 4*8
            self.createValue = lambda: True
            return
        if parent.stream.readBytes(self.absolute_address, 5) == "false":
            self._size = 5*8
            self.createValue = lambda: False
            return
        raise NotABool
+
class LineEnd(FieldSet):
    """
    Made of 0x0A, 0x0D (we may include several line ends)
    """
    def createFields(self):
        while not self.eof:
            addr = self.absolute_address+self.current_size
            char = self.stream.readBytes(addr, 1)
            if char == '\x0A':
                yield UInt8(self, "lf", "Line feed")
            elif char == '\x0D':
                # BUG FIX: 0x0D was described as "Line feed"
                yield UInt8(self, "cr", "Carriage return")
            else:
                self.info("Line ends at %u/%u, len %u" %
                          (addr, self.stream._size, self.current_size))
                break
+
class PDFDictionaryPair(FieldSet):
    """A name/value pair inside a PDF dictionary."""
    def createFields(self):
        # NOTE(review): getElementEnd() returns a size, but PDFName's third
        # parameter is its description ("desc") — confirm this is intended.
        yield PDFName(self, "name", getElementEnd(self))
        for field in parsePDFType(self):
            yield field
+
class PDFDictionary(FieldSet):
    """A PDF dictionary: "<<" delimiter, typed content, ">>" delimiter."""

    def createFields(self):
        yield String(self, "dict_start", 2)
        while not self.eof:
            here = self.absolute_address + self.current_size
            # Stop as soon as the closing delimiter is in view
            if self.stream.readBytes(here, 2) == '>>':
                break
            for field in parsePDFType(self):
                yield field
        yield String(self, "dict_end", 2)
+
class PDFArray(FieldSet):
    """
    Array of possibly non-homogeneous elements, starting with '[' and
    ending with ']'.
    """
    def createFields(self):
        yield String(self, "array_start", 1)
        while True:
            here = self.absolute_address + self.current_size
            if self.stream.readBytes(here, 1) == ']':
                break
            for field in parsePDFType(self):
                yield field
        yield String(self, "array_end", 1)
+
def parsePDFType(s):
    """Dispatch on the next character of *s* and yield the matching PDF
    field (name, dictionary, ID, string, array, catalog or number)."""
    addr = s.absolute_address+s.current_size
    char = s.stream.readBytes(addr, 1)
    if char == '/':
        yield PDFName(s, "type[]", getElementEnd(s))
    elif char == '<':
        # "<<" opens a dictionary, a single '<' opens a hex ID
        if s.stream.readBytes(addr+8, 1) == '<':
            yield PDFDictionary(s, "dict[]")
        else:
            yield PDFID(s, "id[]")
    elif char == '(':
        yield PDFString(s, "string[]")
    elif char == '[':
        yield PDFArray(s, "array[]")
    else:
        # First parse size
        size = getElementEnd(s)
        for limit in ['/', '>', '<']:
            other_size = getElementEnd(s, limit)
            if other_size != None:
                other_size -= 1
                if size == None or (other_size>0 and other_size < size):
                    size = other_size

        # Get element
        name = s.stream.readBytes(addr, size)
        char = s.stream.readBytes(addr+8*size+8, 1)
        # Heuristic: several spaces followed by '<' smells like a catalog
        if name.count(' ') > 1 and char == '<':
            # Probably a catalog
            yield Catalog(s, "catalog[]")
        elif name[0] in ('.','-','+', '0', '1', '2', '3', \
                         '4', '5', '6', '7', '8', '9'):
            s.info("Not a catalog: %u spaces and end='%s'" % (name.count(' '), char))
            yield PDFNumber(s, "integer[]")
        else:
            s.info("Trying to parse '%s': %u bytes" % \
                   (s.stream.readBytes(s.absolute_address+s.current_size, 4), size))
            yield String(s, "unknown[]", size)
+
class Header(FieldSet):
    """PDF file header: the "%PDF-" marker, version string and line end."""

    def createFields(self):
        yield String(self, "marker", 5, MAGIC)
        length = getLineEnd(self, 4)
        # Fixed: comparison to None now uses ``is not`` per PEP 8
        if length is not None:
            yield String(self, "version", length-1)
            yield LineEnd(self, "line_end")
        else:
            self.warning("Can't determine version!")

    def createDescription(self):
        return "PDF version %s" % self["version"].display
+
class Body(FieldSet):
    """Everything between the PDF header and the cross-reference table."""

    def __init__(self, parent, name, desc=None):
        FieldSet.__init__(self, parent, name, desc)
        # The body extends up to the "xref" keyword
        pos = self.stream.searchBytesLength(CrossReferenceTable.MAGIC, False)
        if pos is None:
            raise ParserError("Can't find xref starting at %u" %
                              (self.absolute_address//8))
        self._size = 8*pos-self.absolute_address

    def createFields(self):
        # Leading '%' comment lines
        while self.stream.readBytes(self.absolute_address+self.current_size, 1) == '%':
            size = getLineEnd(self, 4)
            if size == 2:
                yield textHandler(UInt16(self, "crc32"), hexadecimal)
            elif size == 4:
                yield textHandler(UInt32(self, "crc32"), hexadecimal)
            elif self.stream.readBytes(self.absolute_address+self.current_size, size).isalpha():
                yield String(self, "comment[]", size)
            else:
                # BUG FIX: the field was constructed but never yielded, so
                # unknown data was dropped and the position never advanced
                yield RawBytes(self, "unknown_data[]", size)
            yield LineEnd(self, "line_end[]")

        #abs_offset = self.current_size//8
        # TODO: yield objects that read offsets and deduce size from
        # "/cross_ref_table/sub_section[]/entries/item[]"
        offsets = []
        for subsection in self.array("/cross_ref_table/sub_section"):
            for obj in subsection.array("entries/item"):
                if "byte_offset" in obj:
                    # Could be inserted already sorted
                    offsets.append(obj["byte_offset"].value)

        offsets.append(self["/cross_ref_table"].absolute_address//8)
        offsets.sort()
        # Each object spans from its offset to the next one's offset
        for index in xrange(len(offsets)-1):
            yield Catalog(self, "object[]", size=offsets[index+1]-offsets[index])
+
+class Entry(FieldSet):
+    static_size = 20*8
+    def createFields(self):
+        typ = self.stream.readBytes(self.absolute_address+17*8, 1)
+        if typ == 'n':
+            yield PDFNumber(self, "byte_offset")
+        elif typ == 'f':
+            yield PDFNumber(self, "next_free_object_number")
+        else:
+            yield PDFNumber(self, "unknown_string")
+        yield PDFNumber(self, "generation_number")
+        yield UInt8(self, "type")
+        yield LineEnd(self, "line_end")
+    def createDescription(self):
+        if self["type"].value == 'n':
+            return "In-use entry at offset %u" % int(self["byte_offset"].value)
+        elif self["type"].value == 'f':
+            return "Free entry before in-use object %u" % \
+                   int(self["next_free_object_number"].value)
+        else:
+            return "unknown %s" % self["unknown_string"].value
+
+class SubSection(FieldSet):
+    """One xref subsection: a "<start> <count>" line followed by <count>
+    fixed-size (20-byte) entries."""
+    def __init__(self, parent, name, desc=None):
+        FieldSet.__init__(self, parent, name, desc)
+        # Accessing self["entry_count"] triggers lazy parsing of the header
+        # fields, after which current_size covers exactly that header; the
+        # total size is then header + 20 bytes per entry + the line end.
+        self.info("Got entry count: '%s'" % self["entry_count"].value)
+        self._size = self.current_size + 8*20*int(self["entry_count"].value) \
+                     + self["line_end"].size
+
+    def createFields(self):
+        yield PDFNumber(self, "start_number",
+                        "Object number of first entry in subsection")
+        self.info("start_number = %i" % self["start_number"].value)
+
+        yield PDFNumber(self, "entry_count", "Number of entries in subsection")
+        self.info("entry_count = %i" % self["entry_count"].value)
+        yield LineEnd(self, "line_end")
+        # Each element is an Entry (Entry.static_size == 20 bytes).
+        yield GenericVector(self, "entries", int(self["entry_count"].value),
+                            Entry)
+        #yield LineEnd(self, "line_end[]")
+    def createDescription(self):
+        # NOTE(review): formats the start_number *field object*, not its
+        # .value — presumably relies on the field's string conversion;
+        # confirm this renders as intended.
+        return "Subsection with %s elements, starting at %s" % \
+               (self["entry_count"].value, self["start_number"])
+
+class CrossReferenceTable(FieldSet):
+    MAGIC = "xref"
+
+    def __init__(self, parent, name, desc=None):
+        FieldSet.__init__(self, parent, name, description=desc)
+        pos = self.stream.searchBytesLength(Trailer.MAGIC, False)
+        if pos == None:
+            raise ParserError("Can't find '%s' starting at %u" \
+                              (Trailer.MAGIC, self.absolute_address//8))
+        self._size = 8*pos-self.absolute_address
+
+    def createFields(self):
+        yield RawBytes(self, "marker", len(self.MAGIC))
+        yield LineEnd(self, "line_end[]")
+        while not self.eof:
+            yield SubSection(self, "sub_section[]")
+
+class Catalog(FieldSet):
+    """An indirect PDF object ("<index> <gen> obj ... endobj") or similar
+    catalog-like value; the size may be imposed by the caller (computed
+    from xref offsets) or found by scanning for "endobj"."""
+    END_NAME = ['<', '/', '[']  # characters that terminate the object name
+    def __init__(self, parent, name, size=None, desc=None):
+        FieldSet.__init__(self, parent, name, description=desc)
+        if size != None:
+            self._size = 8*size
+        # object catalogs are ended with "obj"
+        elif self["object"].value == "obj":
+            size = self.stream.searchBytesLength("endobj", False)
+            if size != None:
+                # +2 presumably accounts for the line ending after "endobj"
+                # — TODO confirm against sample files.
+                self._size = 8*(size+2)
+    def createFields(self):
+        yield PDFNumber(self, "index")
+        yield PDFNumber(self, "unknown[]")
+        # The object name ends at the nearest of: line end, '<', '/', '['.
+        length = getElementEnd(self)
+        for limit in self.END_NAME:
+            new_length = getElementEnd(self, limit)-len(limit)
+            if length == None or (new_length != None and new_length < length):
+                length = new_length
+        yield String(self, "object", length, strip=' ')
+        # A "<<" introduces an attribute dictionary.
+        if self.stream.readBytes(self.absolute_address+self.current_size, 2) == '<<':
+            yield PDFDictionary(self, "key_list")
+        # End of catalog: this one has "endobj"
+        if self["object"].value == "obj":
+            yield LineEnd(self, "line_end[]")
+            yield String(self, "end_object", len("endobj"))
+            yield LineEnd(self, "line_end[]")
+
+class Trailer(FieldSet):
+    """PDF trailer: "trailer" keyword, "<< /Name value ... >>" attribute
+    dictionary, "startxref" address and the end-of-file marker."""
+    MAGIC = "trailer"
+    def createFields(self):
+        yield RawBytes(self, "marker", len(self.MAGIC))
+        yield LineEnd(self, "line_end[]")
+        yield String(self, "start_attribute_marker", 2)
+        # Read "/Name value" pairs until the closing ">>".
+        addr = self.absolute_address + self.current_size
+        while self.stream.readBytes(addr, 2) != '>>':
+            t = PDFName(self, "type[]")
+            yield t
+            name = t.value
+            self.info("Parsing PDFName '%s'" % name)
+            if name == "Size":
+                yield PDFNumber(self, "size", "Entries in the file cross-reference section")
+            elif name == "Prev":
+                yield PDFNumber(self, "offset")
+            elif name == "Root":
+                yield Catalog(self, "object_catalog")
+            elif name == "Info":
+                yield Catalog(self, "info")
+            elif name == "ID":
+                yield PDFArray(self, "id")
+            elif name == "Encrypt":
+                yield PDFDictionary(self, "decrypt")
+            else:
+                # Unknown trailer keys abort parsing on purpose.
+                raise ParserError("Don't know trailer type '%s'" % name)
+            addr = self.absolute_address + self.current_size
+        yield String(self, "end_attribute_marker", 2)
+        yield LineEnd(self, "line_end[]")
+        yield String(self, "start_xref", 9)
+        yield LineEnd(self, "line_end[]")
+        yield PDFNumber(self, "cross_ref_table_start_address")
+        yield LineEnd(self, "line_end[]")
+        # ENDMAGIC is the module-level "%%EOF" end-of-file marker.
+        yield String(self, "end_marker", len(ENDMAGIC))
+        yield LineEnd(self, "line_end[]")
+
+class PDFDocument(Parser):
+    endian = LITTLE_ENDIAN
+    PARSER_TAGS = {
+        "id": "pdf",
+        "category": "misc",
+        "file_ext": ("pdf",),
+        "mime": (u"application/pdf",),
+        "min_size": (5+4)*8,
+        "magic": ((MAGIC, 5),),
+        "description": "Portable Document Format (PDF) document"
+    }
+
+    def validate(self):
+        if self.stream.readBytes(0, len(MAGIC)) != MAGIC:
+            return "Invalid magic string"
+        return True
+
+    # Size is not always determined by position of "%%EOF":
+    # - updated documents have several of those
+    # - PDF files should be parsed from *end*
+    # => TODO: find when a document has been updated
+
+    def createFields(self):
+        yield Header(self, "header")
+        yield Body(self, "body")
+        yield CrossReferenceTable(self, "cross_ref_table")
+        yield Trailer(self, "trailer")
+
diff --git a/lib/hachoir_parser/misc/pifv.py b/lib/hachoir_parser/misc/pifv.py
new file mode 100644
index 0000000000000000000000000000000000000000..d947473316467ae74c1f5d2ebbb039af5478f51e
--- /dev/null
+++ b/lib/hachoir_parser/misc/pifv.py
@@ -0,0 +1,241 @@
+"""
+EFI Platform Initialization Firmware Volume parser.
+
+Author: Alexandre Boeglin
+Creation date: 08 jul 2007
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    UInt8, UInt16, UInt24, UInt32, UInt64, Enum,
+    CString, String, PaddingBytes, RawBytes, NullBytes)
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.tools import paddingSize, humanFilesize
+from hachoir_parser.common.win32 import GUID
+
+# Section type codes used in FFS file section headers.
+EFI_SECTION_COMPRESSION = 0x1
+EFI_SECTION_GUID_DEFINED = 0x2
+EFI_SECTION_PE32 = 0x10
+EFI_SECTION_PIC = 0x11
+EFI_SECTION_TE = 0x12
+EFI_SECTION_DXE_DEPEX = 0x13
+EFI_SECTION_VERSION = 0x14
+EFI_SECTION_USER_INTERFACE = 0x15
+EFI_SECTION_COMPATIBILITY16 = 0x16
+EFI_SECTION_FIRMWARE_VOLUME_IMAGE = 0x17
+EFI_SECTION_FREEFORM_SUBTYPE_GUID = 0x18
+EFI_SECTION_RAW = 0x19
+EFI_SECTION_PEI_DEPEX = 0x1b
+
+# Human-readable description for each section type code.
+EFI_SECTION_TYPE = {
+    EFI_SECTION_COMPRESSION: "Encapsulation section where other sections" \
+        + " are compressed",
+    EFI_SECTION_GUID_DEFINED: "Encapsulation section where other sections" \
+        + " have format defined by a GUID",
+    EFI_SECTION_PE32: "PE32+ Executable image",
+    EFI_SECTION_PIC: "Position-Independent Code",
+    EFI_SECTION_TE: "Terse Executable image",
+    EFI_SECTION_DXE_DEPEX: "DXE Dependency Expression",
+    EFI_SECTION_VERSION: "Version, Text and Numeric",
+    EFI_SECTION_USER_INTERFACE: "User-Friendly name of the driver",
+    EFI_SECTION_COMPATIBILITY16: "DOS-style 16-bit EXE",
+    EFI_SECTION_FIRMWARE_VOLUME_IMAGE: "PI Firmware Volume image",
+    EFI_SECTION_FREEFORM_SUBTYPE_GUID: "Raw data with GUID in header to" \
+        + " define format",
+    EFI_SECTION_RAW: "Raw data",
+    EFI_SECTION_PEI_DEPEX: "PEI Dependency Expression",
+}
+
+# FFS file type codes used in file headers.
+EFI_FV_FILETYPE_RAW = 0x1
+EFI_FV_FILETYPE_FREEFORM = 0x2
+EFI_FV_FILETYPE_SECURITY_CORE = 0x3
+EFI_FV_FILETYPE_PEI_CORE = 0x4
+EFI_FV_FILETYPE_DXE_CORE = 0x5
+EFI_FV_FILETYPE_PEIM = 0x6
+EFI_FV_FILETYPE_DRIVER = 0x7
+EFI_FV_FILETYPE_COMBINED_PEIM_DRIVER = 0x8
+EFI_FV_FILETYPE_APPLICATION = 0x9
+EFI_FV_FILETYPE_FIRMWARE_VOLUME_IMAGE = 0xb
+EFI_FV_FILETYPE_FFS_PAD = 0xf0
+
+# Human-readable description for each file type code.
+EFI_FV_FILETYPE = {
+    EFI_FV_FILETYPE_RAW: "Binary data",
+    EFI_FV_FILETYPE_FREEFORM: "Sectioned data",
+    EFI_FV_FILETYPE_SECURITY_CORE: "Platform core code used during the SEC" \
+        + " phase",
+    EFI_FV_FILETYPE_PEI_CORE: "PEI Foundation",
+    EFI_FV_FILETYPE_DXE_CORE: "DXE Foundation",
+    EFI_FV_FILETYPE_PEIM: "PEI module (PEIM)",
+    EFI_FV_FILETYPE_DRIVER: "DXE driver",
+    EFI_FV_FILETYPE_COMBINED_PEIM_DRIVER: "Combined PEIM/DXE driver",
+    EFI_FV_FILETYPE_APPLICATION: "Application",
+    EFI_FV_FILETYPE_FIRMWARE_VOLUME_IMAGE: "Firmware volume image",
+    EFI_FV_FILETYPE_FFS_PAD: "Pad File For FFS",
+}
+# Fill in the reserved file-type ranges with generic labels.
+for x in xrange(0xc0, 0xe0):
+    EFI_FV_FILETYPE[x] = "OEM File"
+for x in xrange(0xe0, 0xf0):
+    EFI_FV_FILETYPE[x] = "Debug/Test File"
+for x in xrange(0xf1, 0x100):
+    EFI_FV_FILETYPE[x] = "Firmware File System Specific File"
+
+
+class BlockMap(FieldSet):
+    static_size = 8*8
+    def createFields(self):
+        yield UInt32(self, "num_blocks")
+        yield UInt32(self, "len")
+
+    def createDescription(self):
+        return "%d blocks of %s" % (
+            self["num_blocks"].value, humanFilesize(self["len"].value))
+
+
+class FileSection(FieldSet):
+    """One section of an FFS file: 4-byte common header (24-bit size +
+    type), optional type-specific header fields, then the content."""
+    COMPRESSION_TYPE = {
+        0: 'Not Compressed',
+        1: 'Standard Compression',
+    }
+
+    def __init__(self, *args, **kw):
+        FieldSet.__init__(self, *args, **kw)
+        # Section size in bytes comes from the 24-bit "size" header field.
+        self._size = self["size"].value * 8
+        section_type = self["type"].value
+        if section_type in (EFI_SECTION_DXE_DEPEX, EFI_SECTION_PEI_DEPEX):
+            # These sections can sometimes be longer than what their size
+            # claims! It's so nice to have so detailed specs and not follow
+            # them ...
+            if self.stream.readBytes(self.absolute_address +
+                self._size, 1) == '\0':
+                self._size = self._size + 16
+
+    def createFields(self):
+        # Common header
+        yield UInt24(self, "size")
+        yield Enum(UInt8(self, "type"), EFI_SECTION_TYPE)
+        section_type = self["type"].value
+
+        # Type-specific extra header fields
+        if section_type == EFI_SECTION_COMPRESSION:
+            yield UInt32(self, "uncomp_len")
+            yield Enum(UInt8(self, "comp_type"), self.COMPRESSION_TYPE)
+        elif section_type == EFI_SECTION_FREEFORM_SUBTYPE_GUID:
+            yield GUID(self, "sub_type_guid")
+        elif section_type == EFI_SECTION_GUID_DEFINED:
+            yield GUID(self, "section_definition_guid")
+            yield UInt16(self, "data_offset")
+            yield UInt16(self, "attributes")
+        elif section_type == EFI_SECTION_USER_INTERFACE:
+            yield CString(self, "file_name", charset="UTF-16-LE")
+        elif section_type == EFI_SECTION_VERSION:
+            yield UInt16(self, "build_number")
+            yield CString(self, "version", charset="UTF-16-LE")
+
+        # Content: whatever remains after the header fields
+        content_size = (self.size - self.current_size) // 8
+        if content_size == 0:
+            return
+
+        if section_type == EFI_SECTION_COMPRESSION:
+            compression_type = self["comp_type"].value
+            if compression_type == 1:
+                # NOTE(review): reuses the name "compressed_content" without
+                # the "[]" suffix — presumably a single chunk is expected;
+                # confirm duplicate names cannot occur here.
+                while not self.eof:
+                    yield RawBytes(self, "compressed_content", content_size)
+            else:
+                # Uncompressed encapsulation: parse nested sections.
+                while not self.eof:
+                    yield FileSection(self, "section[]")
+        elif section_type == EFI_SECTION_FIRMWARE_VOLUME_IMAGE:
+            yield FirmwareVolume(self, "firmware_volume")
+        else:
+            yield RawBytes(self, "content", content_size,
+                EFI_SECTION_TYPE.get(self["type"].value,
+                "Unknown Section Type"))
+
+    def createDescription(self):
+        return EFI_SECTION_TYPE.get(self["type"].value,
+            "Unknown Section Type")
+
+
+class File(FieldSet):
+    def __init__(self, *args, **kw):
+        FieldSet.__init__(self, *args, **kw)
+        self._size = self["size"].value * 8
+
+    def createFields(self):
+        # Header
+        yield GUID(self, "name")
+        yield UInt16(self, "integrity_check")
+        yield Enum(UInt8(self, "type"), EFI_FV_FILETYPE)
+        yield UInt8(self, "attributes")
+        yield UInt24(self, "size")
+        yield UInt8(self, "state")
+
+        # Content
+        while not self.eof:
+            yield FileSection(self, "section[]")
+
+    def createDescription(self):
+        return "%s: %s containing %d section(s)" % (
+            self["name"].value,
+            self["type"].display,
+            len(self.array("section")))
+
+
+class FirmwareVolume(FieldSet):
+    """A PI firmware volume: fixed header, block map, then FFS files."""
+    def __init__(self, *args, **kw):
+        FieldSet.__init__(self, *args, **kw)
+        if not self._size:
+            # When the caller did not impose a size, take the volume length
+            # (bytes) from the header field.
+            self._size = self["volume_len"].value * 8
+
+    def createFields(self):
+        # Volume header
+        yield NullBytes(self, "zero_vector", 16)
+        yield GUID(self, "fs_guid")
+        yield UInt64(self, "volume_len")
+        yield String(self, "signature", 4)
+        yield UInt32(self, "attributes")
+        yield UInt16(self, "header_len")
+        yield UInt16(self, "checksum")
+        yield UInt16(self, "ext_header_offset")
+        yield UInt8(self, "reserved")
+        yield UInt8(self, "revision")
+        # Block map: terminated by a (0, 0) sentinel entry.
+        while True:
+            bm = BlockMap(self, "block_map[]")
+            yield bm
+            if bm['num_blocks'].value == 0 and bm['len'].value == 0:
+                break
+        # TODO must handle extended header
+
+        # Content: FFS files, each aligned on an 8-byte boundary.
+        while not self.eof:
+            padding = paddingSize(self.current_size // 8, 8)
+            if padding:
+                yield PaddingBytes(self, "padding[]", padding)
+            yield File(self, "file[]")
+
+    def createDescription(self):
+        return "Firmware Volume containing %d file(s)" % len(self.array("file"))
+
+
+class PIFVFile(Parser):
+    endian = LITTLE_ENDIAN
+    MAGIC = '_FVH'
+    PARSER_TAGS = {
+        "id": "pifv",
+        "category": "program",
+        "file_ext": ("bin", ""),
+        "min_size": 64*8, # smallest possible header
+        "magic_regex": (("\0{16}.{24}%s" % MAGIC, 0), ),
+        "description": "EFI Platform Initialization Firmware Volume",
+    }
+
+    def validate(self):
+        if self.stream.readBytes(40*8, 4) != self.MAGIC:
+            return "Invalid magic number"
+        if self.stream.readBytes(0, 16) != "\0"*16:
+            return "Invalid zero vector"
+        return True
+
+    def createFields(self):
+        while not self.eof:
+            yield FirmwareVolume(self, "firmware_volume[]")
+
diff --git a/lib/hachoir_parser/misc/torrent.py b/lib/hachoir_parser/misc/torrent.py
new file mode 100644
index 0000000000000000000000000000000000000000..88a1bea2f349e60e01f8f836f8b320035e5fc1ef
--- /dev/null
+++ b/lib/hachoir_parser/misc/torrent.py
@@ -0,0 +1,163 @@
+"""
+.torrent metainfo file parser
+
+http://wiki.theory.org/BitTorrentSpecification#Metainfo_File_Structure
+
+Status: To statufy
+Author: Christophe Gisquet <christophe.gisquet@free.fr>
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, ParserError,
+    String, RawBytes)
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.tools import makePrintable, timestampUNIX, humanFilesize
+
+# Upper bound (decimal digits) searched for a string-length prefix:
+# allows lengths in 0..999999
+MAX_STRING_LENGTH = 6
+
+# Upper bound (bytes) searched for an integer value:
+# 21 decimal digits (or "-" sign and 20 digits)
+MAX_INTEGER_SIZE = 21
+
+class Integer(FieldSet):
+    # i<integer encoded in base ten ASCII>e
+    def createFields(self):
+        yield String(self, "start", 1, "Integer start delimiter (i)", charset="ASCII")
+
+        # Find integer end
+        addr = self.absolute_address+self.current_size
+        len = self.stream.searchBytesLength('e', False, addr, addr+(MAX_INTEGER_SIZE+1)*8)
+        if len is None:
+            raise ParserError("Torrent: Unable to find integer end delimiter (e)!")
+        if not len:
+            raise ParserError("Torrent: error, empty integer!")
+
+        yield String(self, "value", len, "Integer value", charset="ASCII")
+        yield String(self, "end", 1, "Integer end delimiter")
+
+    def createValue(self):
+        """Read integer value (may raise ValueError)"""
+        return int(self["value"].value)
+
+class TorrentString(FieldSet):
+    # <string length encoded in base ten ASCII>:<string data>
+
+    def createFields(self):
+        addr = self.absolute_address
+        len = self.stream.searchBytesLength(':', False, addr, addr+(MAX_STRING_LENGTH+1)*8)
+        if len is None:
+            raise ParserError("Torrent: unable to find string separator (':')")
+        if not len:
+            raise ParserError("Torrent: error: no string length!")
+        val = String(self, "length", len, "String length")
+        yield val
+        try:
+            len = int(val.value)
+        except ValueError:
+            len = -1
+        if len < 0:
+            raise ParserError("Invalid string length (%s)" % makePrintable(val.value, "ASCII", to_unicode=True))
+        yield String(self, "separator", 1, "String length/value separator")
+        if not len:
+            self.info("Empty string: len=%i" % len)
+            return
+        if len<512:
+            yield String(self, "value", len, "String value", charset="ISO-8859-1")
+        else:
+            # Probably raw data
+            yield RawBytes(self, "value", len, "Raw data")
+
+    def createValue(self):
+        if "value" in self:
+            field = self["value"]
+            if field.__class__ != RawBytes:
+                return field.value
+            else:
+                return None
+        else:
+            return None
+
+class Dictionary(FieldSet):
+    # d<bencoded string><bencoded element>e
+    def createFields(self):
+        yield String(self, "start", 1, "Dictionary start delimiter (d)", charset="ASCII")
+        while self.stream.readBytes(self.absolute_address+self.current_size, 1) != "e":
+            yield DictionaryItem(self, "item[]")
+        yield String(self, "end", 1, "Dictionary end delimiter")
+
+class List(FieldSet):
+    # l<bencoded values>e
+    def createFields(self):
+        yield String(self, "start", 1, "List start delimiter")
+        while self.stream.readBytes(self.absolute_address+self.current_size, 1) != "e":
+            yield Entry(self, "item[]")
+        yield String(self, "end", 1, "List end delimiter")
+
+class DictionaryItem(FieldSet):
+    """A (key, value) pair of a bencoded dictionary; the field renames
+    itself after its key so items are addressable by name."""
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+
+        # TODO: Remove this because it's not lazy?
+        # Accessing self["key"] here forces immediate parsing of the key.
+        key = self["key"]
+        if not key.hasValue():
+            return
+        key = key.value
+        self._name = str(key).replace(" ", "_")
+
+    def createDisplay(self):
+        # Byte counts are shown human-readable; everything else defaults.
+        if not self["value"].hasValue():
+            return None
+        if self._name in ("length", "piece_length"):
+            return humanFilesize(self.value)
+        return FieldSet.createDisplay(self)
+
+    def createValue(self):
+        if not self["value"].hasValue():
+            return None
+        if self._name == "creation_date":
+            return self.createTimestampValue()
+        else:
+            return self["value"].value
+
+    def createFields(self):
+        yield Entry(self, "key")
+        yield Entry(self, "value")
+
+    def createTimestampValue(self):
+        # "creation date" holds a UNIX timestamp (seconds since the epoch).
+        return timestampUNIX(self["value"].value)
+
+# Map first chunk byte => type
+TAGS = {'d': Dictionary, 'i': Integer, 'l': List}
+for index in xrange(1, 9+1):
+    TAGS[str(index)] = TorrentString
+
+# Create an entry
+def Entry(parent, name):
+    addr = parent.absolute_address + parent.current_size
+    tag = parent.stream.readBytes(addr, 1)
+    if tag not in TAGS:
+        raise ParserError("Torrent: Entry of type %r not handled" % type)
+    cls = TAGS[tag]
+    return cls(parent, name)
+
+class TorrentFile(Parser):
+    endian = LITTLE_ENDIAN
+    MAGIC = "d8:announce"
+    PARSER_TAGS = {
+        "id": "torrent",
+        "category": "misc",
+        "file_ext": ("torrent",),
+        "min_size": 50*8,
+        "mime": (u"application/x-bittorrent",),
+        "magic": ((MAGIC, 0),),
+        "description": "Torrent metainfo file"
+    }
+
+    def validate(self):
+        if self.stream.readBytes(0, len(self.MAGIC)) != self.MAGIC:
+            return "Invalid magic"
+        return True
+
+    def createFields(self):
+        yield Dictionary(self, "root", size=self.size)
+
diff --git a/lib/hachoir_parser/misc/ttf.py b/lib/hachoir_parser/misc/ttf.py
new file mode 100644
index 0000000000000000000000000000000000000000..f1024aabc794f69900de372116096f220f13bcb9
--- /dev/null
+++ b/lib/hachoir_parser/misc/ttf.py
@@ -0,0 +1,277 @@
+"""
+TrueType Font parser.
+
+Documents:
+ - "An Introduction to TrueType Fonts: A look inside the TTF format"
+   written by "NRSI: Computers & Writing Systems"
+   http://scripts.sil.org/cms/scripts/page.php?site_id=nrsi&item_id=IWS-Chapter08
+
+Author: Victor Stinner
+Creation date: 2007-02-08
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, ParserError,
+    UInt16, UInt32, Bit, Bits,
+    PaddingBits, NullBytes,
+    String, RawBytes, Bytes, Enum,
+    TimestampMac32)
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal, filesizeHandler
+
+# Sanity bounds used to reject corrupted fonts.
+MAX_NAME_COUNT = 300
+MIN_NB_TABLE = 3
+MAX_NB_TABLE = 30
+
+# Values of the 'head' table font direction hint field.
+DIRECTION_NAME = {
+    0: u"Mixed directional",
+    1: u"Left to right",
+    2: u"Left to right + neutrals",
+   -1: u"Right to left",
+   -2: u"Right to left + neutrals",
+}
+
+# 'name' table name identifiers.
+NAMEID_NAME = {
+     0: u"Copyright notice",
+     1: u"Font family name",
+     2: u"Font subfamily name",
+     3: u"Unique font identifier",
+     4: u"Full font name",
+     5: u"Version string",
+     6: u"Postscript name",
+     7: u"Trademark",
+     8: u"Manufacturer name",
+     9: u"Designer",
+    10: u"Description",
+    11: u"URL Vendor",
+    12: u"URL Designer",
+    13: u"License Description",
+    14: u"License info URL",
+    16: u"Preferred Family",
+    17: u"Preferred Subfamily",
+    18: u"Compatible Full",
+    19: u"Sample text",
+    20: u"PostScript CID findfont name",
+}
+
+# 'name' table platform identifiers.
+PLATFORM_NAME = {
+    0: "Unicode",
+    1: "Macintosh",
+    2: "ISO",
+    3: "Microsoft",
+    4: "Custom",
+}
+
+CHARSET_MAP = {
+    # (platform, encoding) => charset
+    0: {3: "UTF-16-BE"},
+    1: {0: "MacRoman"},
+    3: {1: "UTF-16-BE"},
+}
+
+class TableHeader(FieldSet):
+    def createFields(self):
+        yield String(self, "tag", 4)
+        yield textHandler(UInt32(self, "checksum"), hexadecimal)
+        yield UInt32(self, "offset")
+        yield filesizeHandler(UInt32(self, "size"))
+
+    def createDescription(self):
+         return "Table entry: %s (%s)" % (self["tag"].display, self["size"].display)
+
+class NameHeader(FieldSet):
+    def createFields(self):
+        yield Enum(UInt16(self, "platformID"), PLATFORM_NAME)
+        yield UInt16(self, "encodingID")
+        yield UInt16(self, "languageID")
+        yield Enum(UInt16(self, "nameID"), NAMEID_NAME)
+        yield UInt16(self, "length")
+        yield UInt16(self, "offset")
+
+    def getCharset(self):
+        platform = self["platformID"].value
+        encoding = self["encodingID"].value
+        try:
+            return CHARSET_MAP[platform][encoding]
+        except KeyError:
+            self.warning("TTF: Unknown charset (%s,%s)" % (platform, encoding))
+            return "ISO-8859-1"
+
+    def createDescription(self):
+        platform = self["platformID"].display
+        name = self["nameID"].display
+        return "Name record: %s (%s)" % (name, platform)
+
+def parseFontHeader(self):
+    yield UInt16(self, "maj_ver", "Major version")
+    yield UInt16(self, "min_ver", "Minor version")
+    yield UInt16(self, "font_maj_ver", "Font major version")
+    yield UInt16(self, "font_min_ver", "Font minor version")
+    yield textHandler(UInt32(self, "checksum"), hexadecimal)
+    yield Bytes(self, "magic", 4, r"Magic string (\x5F\x0F\x3C\xF5)")
+    if self["magic"].value != "\x5F\x0F\x3C\xF5":
+        raise ParserError("TTF: invalid magic of font header")
+
+    # Flags
+    yield Bit(self, "y0", "Baseline at y=0")
+    yield Bit(self, "x0", "Left sidebearing point at x=0")
+    yield Bit(self, "instr_point", "Instructions may depend on point size")
+    yield Bit(self, "ppem", "Force PPEM to integer values for all")
+    yield Bit(self, "instr_width", "Instructions may alter advance width")
+    yield Bit(self, "vertical", "e laid out vertically?")
+    yield PaddingBits(self, "reserved[]", 1)
+    yield Bit(self, "linguistic", "Requires layout for correct linguistic rendering?")
+    yield Bit(self, "gx", "Metamorphosis effects?")
+    yield Bit(self, "strong", "Contains strong right-to-left glyphs?")
+    yield Bit(self, "indic", "contains Indic-style rearrangement effects?")
+    yield Bit(self, "lossless", "Data is lossless (Agfa MicroType compression)")
+    yield Bit(self, "converted", "Font converted (produce compatible metrics)")
+    yield Bit(self, "cleartype", "Optimised for ClearType")
+    yield Bits(self, "adobe", 2, "(used by Adobe)")
+
+    yield UInt16(self, "unit_per_em", "Units per em")
+    if not(16 <= self["unit_per_em"].value <= 16384):
+        raise ParserError("TTF: Invalid unit/em value")
+    yield UInt32(self, "created_high")
+    yield TimestampMac32(self, "created")
+    yield UInt32(self, "modified_high")
+    yield TimestampMac32(self, "modified")
+    yield UInt16(self, "xmin")
+    yield UInt16(self, "ymin")
+    yield UInt16(self, "xmax")
+    yield UInt16(self, "ymax")
+
+    # Mac style
+    yield Bit(self, "bold")
+    yield Bit(self, "italic")
+    yield Bit(self, "underline")
+    yield Bit(self, "outline")
+    yield Bit(self, "shadow")
+    yield Bit(self, "condensed", "(narrow)")
+    yield Bit(self, "expanded")
+    yield PaddingBits(self, "reserved[]", 9)
+
+    yield UInt16(self, "lowest", "Smallest readable size in pixels")
+    yield Enum(UInt16(self, "font_dir", "Font direction hint"), DIRECTION_NAME)
+    yield Enum(UInt16(self, "ofst_format"), {0: "short offsets", 1: "long"})
+    yield UInt16(self, "glyph_format", "(=0)")
+
+def parseNames(self):
+    """Parse the 'name' table: header, record index, then the string values
+    sorted by their storage offset (duplicates and bad offsets skipped)."""
+    # Read header
+    yield UInt16(self, "format")
+    if self["format"].value != 0:
+        raise ParserError("TTF (names): Invalid format (%u)" % self["format"].value)
+    yield UInt16(self, "count")
+    yield UInt16(self, "offset")
+    # Sanity bound against corrupted fonts.
+    if MAX_NAME_COUNT < self["count"].value:
+        raise ParserError("Invalid number of names (%s)"
+            % self["count"].value)
+
+    # Read name index
+    entries = []
+    for index in xrange(self["count"].value):
+        entry = NameHeader(self, "header[]")
+        yield entry
+        entries.append(entry)
+
+    # Sort names by their offset
+    entries.sort(key=lambda field: field["offset"].value)
+
+    # Read name value
+    last = None
+    for entry in entries:
+        # Skip duplicates values
+        new = (entry["offset"].value, entry["length"].value)
+        if last and last == new:
+            self.warning("Skip duplicate %s %s" % (entry.name, new))
+            continue
+        last = (entry["offset"].value, entry["length"].value)
+
+        # Skip entries whose value would start before the current position
+        # (cannot seek backwards).
+        offset = entry["offset"].value + self["offset"].value
+        if offset < self.current_size//8:
+            self.warning("Skip value %s (negative offset)" % entry.name)
+            continue
+
+        # Add padding if any
+        padding = self.seekByte(offset, relative=True, null=True)
+        if padding:
+            yield padding
+
+        # Read value using the charset implied by (platform, encoding).
+        size = entry["length"].value
+        if size:
+            yield String(self, "value[]", size, entry.description, charset=entry.getCharset())
+
+    # Account for any trailing bytes of the table.
+    padding = (self.size - self.current_size) // 8
+    if padding:
+        yield NullBytes(self, "padding_end", padding)
+
+class Table(FieldSet):
+    """Content of one TTF table; known tags get a dedicated parser,
+    everything else is kept as raw bytes."""
+    TAG_INFO = {
+        # tag => (field name, description, parser generator)
+        "head": ("header", "Font header", parseFontHeader),
+        "name": ("names", "Names", parseNames),
+    }
+
+    def __init__(self, parent, name, table, **kw):
+        FieldSet.__init__(self, parent, name, **kw)
+        self.table = table
+        tag = table["tag"].value
+        if tag in self.TAG_INFO:
+            # Rename the field after the known tag and attach its parser.
+            self._name, self._description, self.parser = self.TAG_INFO[tag]
+        else:
+            self.parser = None
+
+    def createFields(self):
+        if self.parser:
+            for field in self.parser(self):
+                yield field
+        else:
+            # Unknown table: keep the payload verbatim.
+            yield RawBytes(self, "content", self.size//8)
+
+    def createDescription(self):
+        return "Table %s (%s)" % (self.table["tag"].value, self.table.path)
+
+class TrueTypeFontFile(Parser):
+    """Top-level TrueType font parser: offset table, table directory,
+    then the tables sorted by file offset."""
+    endian = BIG_ENDIAN
+    PARSER_TAGS = {
+        "id": "ttf",
+        "category": "misc",
+        "file_ext": ("ttf",),
+        "min_size": 10*8, # FIXME
+        "description": "TrueType font",
+    }
+
+    def validate(self):
+        # Field access here triggers createFields() lazily, so the header
+        # values are available before the whole file is parsed.
+        if self["maj_ver"].value != 1:
+            return "Invalid major version (%u)" % self["maj_ver"].value
+        if self["min_ver"].value != 0:
+            return "Invalid minor version (%u)" % self["min_ver"].value
+        if not (MIN_NB_TABLE <= self["nb_table"].value <= MAX_NB_TABLE):
+            return "Invalid number of table (%u)" % self["nb_table"].value
+        return True
+
+    def createFields(self):
+        # Offset table
+        yield UInt16(self, "maj_ver", "Major version")
+        yield UInt16(self, "min_ver", "Minor version")
+        yield UInt16(self, "nb_table")
+        yield UInt16(self, "search_range")
+        yield UInt16(self, "entry_selector")
+        yield UInt16(self, "range_shift")
+        # Table directory
+        tables = []
+        for index in xrange(self["nb_table"].value):
+            table = TableHeader(self, "table_hdr[]")
+            yield table
+            tables.append(table)
+        # Emit table contents in file order (directory order may differ).
+        tables.sort(key=lambda field: field["offset"].value)
+        for table in tables:
+            padding = self.seekByte(table["offset"].value, null=True)
+            if padding:
+                yield padding
+            size = table["size"].value
+            if size:
+                yield Table(self, "table[]", table, size=size*8)
+        padding = self.seekBit(self.size, null=True)
+        if padding:
+            yield padding
+
diff --git a/lib/hachoir_parser/misc/word_doc.py b/lib/hachoir_parser/misc/word_doc.py
new file mode 100644
index 0000000000000000000000000000000000000000..88de4c297ec4d4ba071a6ba447e382f5dba7d9c5
--- /dev/null
+++ b/lib/hachoir_parser/misc/word_doc.py
@@ -0,0 +1,299 @@
+"""
+Documents:
+
+* libwx source code: see fib.c source code
+* "Microsoft Word 97 Binary File Format"
+   http://bio.gsi.de/DOCS/AIX/wword8.html
+
+   Microsoft Word 97 (aka Version 8) for Windows and Macintosh. From the Office
+   book, found in the Microsoft Office Development section in the MSDN Online
+   Library. HTMLified June 1998. Revised Aug 1 1998, added missing Definitions
+   section. Revised Dec 21 1998, added missing Document Properties (section).
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    Bit, Bits,
+    UInt8, Int16, UInt16, UInt32, Int32,
+    NullBytes, RawBytes, PascalString16,
+    DateTimeMSDOS32)
+from hachoir_core.endian import LITTLE_ENDIAN
+
# Word stores these timestamps in the 32-bit MS-DOS date/time format.
TIMESTAMP = DateTimeMSDOS32
+
class BaseWordDocument:
    """
    Layout of the FIB (File Information Block) found at the start of the
    "WordDocument" stream of a Word 97 file (see the wword8.html reference
    in the module docstring).

    Only defines createFields(); the concrete classes below combine it
    with FieldSet or Parser. Field order must match the on-disk layout
    exactly, so the sequence below must not be reordered.
    """

    def createFields(self):
        # --- FIB base header ---
        # NOTE(review): the literal 2 lands in the "description" parameter
        # of UInt16 -- confirm intent (a text description seems expected).
        yield UInt16(self, "wIdent", 2)
        yield UInt16(self, "nFib")
        yield UInt16(self, "nProduct")
        yield UInt16(self, "lid")
        yield Int16(self, "pnNext")

        # First 16-bit flag word. The misspelled names below
        # ("fReadOnlyRecommanded", "fFarEeast") are kept as-is: fields
        # are looked up by name elsewhere.
        yield Bit(self, "fDot")
        yield Bit(self, "fGlsy")
        yield Bit(self, "fComplex")
        yield Bit(self, "fHasPic")
        yield Bits(self, "cQuickSaves", 4)
        yield Bit(self, "fEncrypted")
        yield Bit(self, "fWhichTblStm")
        yield Bit(self, "fReadOnlyRecommanded")
        yield Bit(self, "fWriteReservation")
        yield Bit(self, "fExtChar")
        yield Bit(self, "fLoadOverride")
        yield Bit(self, "fFarEeast")
        yield Bit(self, "fCrypto")

        yield UInt16(self, "nFibBack")
        yield UInt32(self, "lKey")
        yield UInt8(self, "envr")

        # Second flag byte.
        yield Bit(self, "fMac")
        yield Bit(self, "fEmptySpecial")
        yield Bit(self, "fLoadOverridePage")
        yield Bit(self, "fFutureSavedUndo")
        yield Bit(self, "fWord97Save")
        yield Bits(self, "fSpare0", 3)

        yield UInt16(self, "chse")
        yield UInt16(self, "chsTables")
        yield UInt32(self, "fcMin")
        yield UInt32(self, "fcMac")

        yield PascalString16(self, "file_creator", strip="\0")

        yield NullBytes(self, "reserved[]", 12)

        # Counts and product/revision info.
        yield Int16(self, "lidFE")
        yield UInt16(self, "clw")
        yield Int32(self, "cbMac")
        yield UInt32(self, "lProductCreated")
        yield TIMESTAMP(self, "lProductRevised")

        # Character counts (ccp*) of the document parts, then the
        # bin-table page numbers/counts.
        yield UInt32(self, "ccpText")
        yield Int32(self, "ccpFtn")
        yield Int32(self, "ccpHdr")
        yield Int32(self, "ccpMcr")
        yield Int32(self, "ccpAtn")
        yield Int32(self, "ccpEdn")
        yield Int32(self, "ccpTxbx")
        yield Int32(self, "ccpHdrTxbx")
        yield Int32(self, "pnFbpChpFirst")
        yield Int32(self, "pnChpFirst")
        yield Int32(self, "cpnBteChp")
        yield Int32(self, "pnFbpPapFirst")
        yield Int32(self, "pnPapFirst")
        yield Int32(self, "cpnBtePap")
        yield Int32(self, "pnFbpLvcFirst")
        yield Int32(self, "pnLvcFirst")
        yield Int32(self, "cpnBteLvc")
        yield Int32(self, "fcIslandFirst")
        yield Int32(self, "fcIslandLim")
        yield UInt16(self, "cfclcb")
        # From here on: (fcXxx, lcbXxx) pairs -- an offset into the table
        # stream and the byte count of the structure stored there.
        yield Int32(self, "fcStshfOrig")
        yield UInt32(self, "lcbStshfOrig")
        yield Int32(self, "fcStshf")
        yield UInt32(self, "lcbStshf")

        yield Int32(self, "fcPlcffndRef")
        yield UInt32(self, "lcbPlcffndRef")
        yield Int32(self, "fcPlcffndTxt")
        yield UInt32(self, "lcbPlcffndTxt")
        yield Int32(self, "fcPlcfandRef")
        yield UInt32(self, "lcbPlcfandRef")
        yield Int32(self, "fcPlcfandTxt")
        yield UInt32(self, "lcbPlcfandTxt")
        yield Int32(self, "fcPlcfsed")
        yield UInt32(self, "lcbPlcfsed")
        yield Int32(self, "fcPlcpad")
        yield UInt32(self, "lcbPlcpad")
        yield Int32(self, "fcPlcfphe")
        yield UInt32(self, "lcbPlcfphe")
        yield Int32(self, "fcSttbfglsy")
        yield UInt32(self, "lcbSttbfglsy")
        yield Int32(self, "fcPlcfglsy")
        yield UInt32(self, "lcbPlcfglsy")
        yield Int32(self, "fcPlcfhdd")
        yield UInt32(self, "lcbPlcfhdd")
        yield Int32(self, "fcPlcfbteChpx")
        yield UInt32(self, "lcbPlcfbteChpx")
        yield Int32(self, "fcPlcfbtePapx")
        yield UInt32(self, "lcbPlcfbtePapx")
        yield Int32(self, "fcPlcfsea")
        yield UInt32(self, "lcbPlcfsea")
        yield Int32(self, "fcSttbfffn")
        yield UInt32(self, "lcbSttbfffn")
        yield Int32(self, "fcPlcffldMom")
        yield UInt32(self, "lcbPlcffldMom")
        yield Int32(self, "fcPlcffldHdr")
        yield UInt32(self, "lcbPlcffldHdr")
        yield Int32(self, "fcPlcffldFtn")
        yield UInt32(self, "lcbPlcffldFtn")
        yield Int32(self, "fcPlcffldAtn")
        yield UInt32(self, "lcbPlcffldAtn")
        yield Int32(self, "fcPlcffldMcr")
        yield UInt32(self, "lcbPlcffldMcr")
        yield Int32(self, "fcSttbfbkmk")
        yield UInt32(self, "lcbSttbfbkmk")
        yield Int32(self, "fcPlcfbkf")
        yield UInt32(self, "lcbPlcfbkf")
        yield Int32(self, "fcPlcfbkl")
        yield UInt32(self, "lcbPlcfbkl")
        yield Int32(self, "fcCmds")
        yield UInt32(self, "lcbCmds")
        yield Int32(self, "fcPlcmcr")
        yield UInt32(self, "lcbPlcmcr")
        yield Int32(self, "fcSttbfmcr")
        yield UInt32(self, "lcbSttbfmcr")
        yield Int32(self, "fcPrDrvr")
        yield UInt32(self, "lcbPrDrvr")
        yield Int32(self, "fcPrEnvPort")
        yield UInt32(self, "lcbPrEnvPort")
        yield Int32(self, "fcPrEnvLand")
        yield UInt32(self, "lcbPrEnvLand")
        yield Int32(self, "fcWss")
        yield UInt32(self, "lcbWss")
        yield Int32(self, "fcDop")
        yield UInt32(self, "lcbDop")
        yield Int32(self, "fcSttbfAssoc")
        yield UInt32(self, "lcbSttbfAssoc")
        yield Int32(self, "fcClx")
        yield UInt32(self, "lcbClx")
        yield Int32(self, "fcPlcfpgdFtn")
        yield UInt32(self, "lcbPlcfpgdFtn")
        yield Int32(self, "fcAutosaveSource")
        yield UInt32(self, "lcbAutosaveSource")
        yield Int32(self, "fcGrpXstAtnOwners")
        yield UInt32(self, "lcbGrpXstAtnOwners")
        yield Int32(self, "fcSttbfAtnbkmk")
        yield UInt32(self, "lcbSttbfAtnbkmk")
        yield Int32(self, "fcPlcdoaMom")
        yield UInt32(self, "lcbPlcdoaMom")
        yield Int32(self, "fcPlcdoaHdr")
        yield UInt32(self, "lcbPlcdoaHdr")
        yield Int32(self, "fcPlcspaMom")
        yield UInt32(self, "lcbPlcspaMom")
        yield Int32(self, "fcPlcspaHdr")
        yield UInt32(self, "lcbPlcspaHdr")
        yield Int32(self, "fcPlcfAtnbkf")
        yield UInt32(self, "lcbPlcfAtnbkf")
        yield Int32(self, "fcPlcfAtnbkl")
        yield UInt32(self, "lcbPlcfAtnbkl")
        yield Int32(self, "fcPms")
        yield UInt32(self, "lcbPms")
        yield Int32(self, "fcFormFldSttbs")
        yield UInt32(self, "lcbFormFldSttbs")
        yield Int32(self, "fcPlcfendRef")
        yield UInt32(self, "lcbPlcfendRef")
        yield Int32(self, "fcPlcfendTxt")
        yield UInt32(self, "lcbPlcfendTxt")
        yield Int32(self, "fcPlcffldEdn")
        yield UInt32(self, "lcbPlcffldEdn")
        yield Int32(self, "fcPlcfpgdEdn")
        yield UInt32(self, "lcbPlcfpgdEdn")
        yield Int32(self, "fcDggInfo")
        yield UInt32(self, "lcbDggInfo")
        yield Int32(self, "fcSttbfRMark")
        yield UInt32(self, "lcbSttbfRMark")
        yield Int32(self, "fcSttbCaption")
        yield UInt32(self, "lcbSttbCaption")
        yield Int32(self, "fcSttbAutoCaption")
        yield UInt32(self, "lcbSttbAutoCaption")
        yield Int32(self, "fcPlcfwkb")
        yield UInt32(self, "lcbPlcfwkb")
        yield Int32(self, "fcPlcfspl")
        yield UInt32(self, "lcbPlcfspl")
        yield Int32(self, "fcPlcftxbxTxt")
        yield UInt32(self, "lcbPlcftxbxTxt")
        yield Int32(self, "fcPlcffldTxbx")
        yield UInt32(self, "lcbPlcffldTxbx")
        yield Int32(self, "fcPlcfhdrtxbxTxt")
        yield UInt32(self, "lcbPlcfhdrtxbxTxt")
        yield Int32(self, "fcPlcffldHdrTxbx")
        yield UInt32(self, "lcbPlcffldHdrTxbx")
        yield Int32(self, "fcStwUser")
        yield UInt32(self, "lcbStwUser")
        yield Int32(self, "fcSttbttmbd")
        # NOTE: "cbSttbttmbd" (not "lcb...") follows the source document's
        # naming; kept as-is.
        yield UInt32(self, "cbSttbttmbd")
        yield Int32(self, "fcUnused")
        yield UInt32(self, "lcbUnused")
        yield Int32(self, "fcPgdMother")
        yield UInt32(self, "lcbPgdMother")
        yield Int32(self, "fcBkdMother")
        yield UInt32(self, "lcbBkdMother")
        yield Int32(self, "fcPgdFtn")
        yield UInt32(self, "lcbPgdFtn")
        yield Int32(self, "fcBkdFtn")
        yield UInt32(self, "lcbBkdFtn")
        yield Int32(self, "fcPgdEdn")
        yield UInt32(self, "lcbPgdEdn")
        yield Int32(self, "fcBkdEdn")
        yield UInt32(self, "lcbBkdEdn")
        yield Int32(self, "fcSttbfIntlFld")
        yield UInt32(self, "lcbSttbfIntlFld")
        yield Int32(self, "fcRouteSlip")
        yield UInt32(self, "lcbRouteSlip")
        yield Int32(self, "fcSttbSavedBy")
        yield UInt32(self, "lcbSttbSavedBy")
        yield Int32(self, "fcSttbFnm")
        yield UInt32(self, "lcbSttbFnm")
        yield Int32(self, "fcPlcfLst")
        yield UInt32(self, "lcbPlcfLst")
        yield Int32(self, "fcPlfLfo")
        yield UInt32(self, "lcbPlfLfo")
        yield Int32(self, "fcPlcftxbxBkd")
        yield UInt32(self, "lcbPlcftxbxBkd")
        yield Int32(self, "fcPlcftxbxHdrBkd")
        yield UInt32(self, "lcbPlcftxbxHdrBkd")
        yield Int32(self, "fcDocUndo")
        yield UInt32(self, "lcbDocUndo")
        yield Int32(self, "fcRgbuse")
        yield UInt32(self, "lcbRgbuse")
        yield Int32(self, "fcUsp")
        yield UInt32(self, "lcbUsp")
        yield Int32(self, "fcUskf")
        yield UInt32(self, "lcbUskf")
        yield Int32(self, "fcPlcupcRgbuse")
        yield UInt32(self, "lcbPlcupcRgbuse")
        yield Int32(self, "fcPlcupcUsp")
        yield UInt32(self, "lcbPlcupcUsp")
        yield Int32(self, "fcSttbGlsyStyle")
        yield UInt32(self, "lcbSttbGlsyStyle")
        yield Int32(self, "fcPlgosl")
        yield UInt32(self, "lcbPlgosl")
        yield Int32(self, "fcPlcocx")
        yield UInt32(self, "lcbPlcocx")
        yield Int32(self, "fcPlcfbteLvc")
        yield UInt32(self, "lcbPlcfbteLvc")
        # Last-modification timestamp sits between the fc/lcb pairs.
        yield TIMESTAMP(self, "ftModified")
        yield Int32(self, "fcPlcflvc")
        yield UInt32(self, "lcbPlcflvc")
        yield Int32(self, "fcPlcasumy")
        yield UInt32(self, "lcbPlcasumy")
        yield Int32(self, "fcPlcfgram")
        yield UInt32(self, "lcbPlcfgram")
        yield Int32(self, "fcSttbListNames")
        yield UInt32(self, "lcbSttbListNames")
        yield Int32(self, "fcSttbfUssr")
        yield UInt32(self, "lcbSttbfUssr")

        # Whatever remains (if anything) is kept as raw bytes.
        tail = (self.size - self.current_size) // 8
        if tail:
            yield RawBytes(self, "tail", tail)
+
class WordDocumentFieldSet(BaseWordDocument, FieldSet):
    """FIB layout parsed as an embedded field set (e.g. from a container)."""
    pass
+
class WordDocumentParser(BaseWordDocument, Parser):
    """
    Stand-alone parser for a raw "WordDocument" stream; the field layout
    is inherited from BaseWordDocument.createFields().
    """
    PARSER_TAGS = {
        "id": "word_document",
        "min_size": 8,
        "description": "Microsoft Office Word document",
    }
    endian = LITTLE_ENDIAN

    # The previous no-op __init__ override (which only forwarded to
    # Parser.__init__) has been removed: the inherited constructor is used
    # directly, with identical behavior for callers.

    def validate(self):
        # Always accepted: this parser is normally selected explicitly
        # (e.g. for a stream extracted from an OLE2 container), so no
        # magic-byte check is performed here.
        return True
+
diff --git a/lib/hachoir_parser/network/__init__.py b/lib/hachoir_parser/network/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a7fe24734553b0981d39f5a29a7359a4a8bdc3c0
--- /dev/null
+++ b/lib/hachoir_parser/network/__init__.py
@@ -0,0 +1,2 @@
+from hachoir_parser.network.tcpdump import TcpdumpFile
+
diff --git a/lib/hachoir_parser/network/common.py b/lib/hachoir_parser/network/common.py
new file mode 100644
index 0000000000000000000000000000000000000000..d6e9feaa61cfd52166c1950a482a467a2d4b2319
--- /dev/null
+++ b/lib/hachoir_parser/network/common.py
@@ -0,0 +1,118 @@
+from hachoir_core.field import FieldSet, Field, Bits
+from hachoir_core.bits import str2hex
+from hachoir_parser.network.ouid import REGISTERED_OUID
+from hachoir_core.endian import BIG_ENDIAN
+from socket import gethostbyaddr, herror as socket_host_error
+
def ip2name(addr):
    """
    Resolve an IP address string to a host name via reverse DNS.

    Results (including failures, where the address itself is returned) are
    memoized in ip2name.cache. A KeyboardInterrupt during resolution
    disables all further lookups (ip2name.resolve = False), after which
    the address is returned unchanged.

    Fix over the original: the second outer "except" clause was dead code
    (socket_host_error and ValueError were already consumed by the first
    clause), and the nested try/except workaround is no longer needed.
    """
    if not ip2name.resolve:
        return addr
    if addr in ip2name.cache:
        return ip2name.cache[addr]
    try:
        name = gethostbyaddr(addr)[0]
    except (socket_host_error, ValueError):
        # Lookup failed: fall back to the raw address (and cache that).
        name = addr
    except KeyboardInterrupt:
        # User aborted a slow lookup: permanently disable resolution.
        ip2name.resolve = False
        name = addr
    ip2name.cache[addr] = name
    return name
ip2name.cache = {}
ip2name.resolve = True
+
class IPv4_Address(Field):
    """32-bit IPv4 address; displayed via (cached) reverse DNS lookup."""

    def __init__(self, parent, name, description=None):
        Field.__init__(self, parent, name, 32, description)

    def createValue(self):
        # Dotted-quad text form of the four raw bytes.
        raw = self._parent.stream.readBytes(self.absolute_address, 4)
        return "%u.%u.%u.%u" % tuple(ord(octet) for octet in raw)

    def createDisplay(self):
        # Host name when resolution is enabled, else the address itself.
        return ip2name(self.value)
+
class IPv6_Address(Field):
    """128-bit IPv6 address rendered as eight colon-separated hex groups."""

    def __init__(self, parent, name, description=None):
        Field.__init__(self, parent, name, 128, description)

    def createValue(self):
        raw = self._parent.stream.readBits(self.absolute_address, 128, self.parent.endian)
        # Emit the eight 16-bit groups from most to least significant.
        groups = []
        shift = 112
        while shift >= 0:
            groups.append("%04x" % ((raw >> shift) & 0xffff))
            shift -= 16
        return ':'.join(groups)

    def createDisplay(self):
        return self.value
+
class OrganizationallyUniqueIdentifier(Bits):
    """
    IEEE 24-bit Organizationally unique identifier
    """
    static_size = 24

    def __init__(self, parent, name, description=None):
        # BUG FIX: the caller's description was previously discarded
        # ("description=None" was passed instead of the argument).
        Bits.__init__(self, parent, name, 24, description)

    def createDisplay(self, human=True):
        # Registered vendor name when known, otherwise the XX-XX-XX form.
        if human:
            return REGISTERED_OUID.get(self.value, self.raw_display)
        return self.raw_display

    def createRawDisplay(self):
        # Canonical hyphen-separated uppercase hex triplet.
        value = self.value
        return "%02X-%02X-%02X" % (value >> 16, (value >> 8) & 0xFF, value & 0xFF)
+
class NIC24(Bits):
    """24-bit NIC-specific half of an IEEE 802 MAC-48 address."""
    static_size = 24

    def __init__(self, parent, name, description=None):
        # BUG FIX: pass the caller's description through
        # (it was previously hard-coded to None).
        Bits.__init__(self, parent, name, 24, description)

    def createDisplay(self):
        # Colon-separated lowercase hex bytes, e.g. "1a:2b:3c".
        value = self.value
        return "%02x:%02x:%02x" % (value >> 16, (value >> 8) & 0xFF, value & 0xFF)

    def createRawDisplay(self):
        return "0x%06X" % self.value
+
class MAC48_Address(FieldSet):
    """
    IEEE 802 48-bit MAC address
    """
    static_size = 48
    endian = BIG_ENDIAN

    def createFields(self):
        # OUI (vendor) part followed by the NIC-specific part.
        yield OrganizationallyUniqueIdentifier(self, "organization")
        yield NIC24(self, "nic")

    def hasValue(self):
        return True

    def createValue(self):
        # Hex-dump the six raw bytes with "%02x:" per byte, then drop
        # the trailing colon, giving "aa:bb:cc:dd:ee:ff".
        raw = self.stream.readBytes(self.absolute_address, 6)
        return str2hex(raw, format="%02x:")[:-1]

    def createDisplay(self):
        org = self["organization"].display
        nic = self["nic"].display
        return "%s [%s]" % (org, nic)
+
diff --git a/lib/hachoir_parser/network/ouid.py b/lib/hachoir_parser/network/ouid.py
new file mode 100644
index 0000000000000000000000000000000000000000..cc912b568a409208f948114be41cd01c5d77c6b9
--- /dev/null
+++ b/lib/hachoir_parser/network/ouid.py
@@ -0,0 +1,10110 @@
+# -*- coding: utf-8 -*-
+"""
+List of registered IEEE 24-bit Organizationally Unique IDentifiers.
+
+Original data file:
+http://standards.ieee.org/regauth/oui/oui.txt
+"""
+
+REGISTERED_OUID = {
+   0x000000: u'XEROX CORPORATION',
+   0x000001: u'XEROX CORPORATION',
+   0x000002: u'XEROX CORPORATION',
+   0x000003: u'XEROX CORPORATION',
+   0x000004: u'XEROX CORPORATION',
+   0x000005: u'XEROX CORPORATION',
+   0x000006: u'XEROX CORPORATION',
+   0x000007: u'XEROX CORPORATION',
+   0x000008: u'XEROX CORPORATION',
+   0x000009: u'XEROX CORPORATION',
+   0x00000A: u'OMRON TATEISI ELECTRONICS CO.',
+   0x00000B: u'MATRIX CORPORATION',
+   0x00000C: u'CISCO SYSTEMS, INC.',
+   0x00000D: u'FIBRONICS LTD.',
+   0x00000E: u'FUJITSU LIMITED',
+   0x00000F: u'NEXT, INC.',
+   0x000010: u'SYTEK INC.',
+   0x000011: u'NORMEREL SYSTEMES',
+   0x000012: u'INFORMATION TECHNOLOGY LIMITED',
+   0x000013: u'CAMEX',
+   0x000014: u'NETRONIX',
+   0x000015: u'DATAPOINT CORPORATION',
+   0x000016: u'DU PONT PIXEL SYSTEMS.',
+   0x000017: u'TEKELEC',
+   0x000018: u'WEBSTER COMPUTER CORPORATION',
+   0x000019: u'APPLIED DYNAMICS INTERNATIONAL',
+   0x00001A: u'ADVANCED MICRO DEVICES',
+   0x00001B: u'NOVELL INC.',
+   0x00001C: u'BELL TECHNOLOGIES',
+   0x00001D: u'CABLETRON SYSTEMS, INC.',
+   0x00001E: u'TELSIST INDUSTRIA ELECTRONICA',
+   0x00001F: u'Telco Systems, Inc.',
+   0x000020: u'DATAINDUSTRIER DIAB AB',
+   0x000021: u'SUREMAN COMP. & COMMUN. CORP.',
+   0x000022: u'VISUAL TECHNOLOGY INC.',
+   0x000023: u'ABB INDUSTRIAL SYSTEMS AB',
+   0x000024: u'CONNECT AS',
+   0x000025: u'RAMTEK CORP.',
+   0x000026: u'SHA-KEN CO., LTD.',
+   0x000027: u'JAPAN RADIO COMPANY',
+   0x000028: u'PRODIGY SYSTEMS CORPORATION',
+   0x000029: u'IMC NETWORKS CORP.',
+   0x00002A: u'TRW - SEDD/INP',
+   0x00002B: u'CRISP AUTOMATION, INC',
+   0x00002C: u'AUTOTOTE LIMITED',
+   0x00002D: u'CHROMATICS INC',
+   0x00002E: u'SOCIETE EVIRA',
+   0x00002F: u'TIMEPLEX INC.',
+   0x000030: u'VG LABORATORY SYSTEMS LTD',
+   0x000031: u'QPSX COMMUNICATIONS PTY LTD',
+   0x000032: u'Marconi plc',
+   0x000033: u'EGAN MACHINERY COMPANY',
+   0x000034: u'NETWORK RESOURCES CORPORATION',
+   0x000035: u'SPECTRAGRAPHICS CORPORATION',
+   0x000036: u'ATARI CORPORATION',
+   0x000037: u'OXFORD METRICS LIMITED',
+   0x000038: u'CSS LABS',
+   0x000039: u'TOSHIBA CORPORATION',
+   0x00003A: u'CHYRON CORPORATION',
+   0x00003B: u'i Controls, Inc.',
+   0x00003C: u'AUSPEX SYSTEMS INC.',
+   0x00003D: u'UNISYS',
+   0x00003E: u'SIMPACT',
+   0x00003F: u'SYNTREX, INC.',
+   0x000040: u'APPLICON, INC.',
+   0x000041: u'ICE CORPORATION',
+   0x000042: u'METIER MANAGEMENT SYSTEMS LTD.',
+   0x000043: u'MICRO TECHNOLOGY',
+   0x000044: u'CASTELLE CORPORATION',
+   0x000045: u'FORD AEROSPACE & COMM. CORP.',
+   0x000046: u'OLIVETTI NORTH AMERICA',
+   0x000047: u'NICOLET INSTRUMENTS CORP.',
+   0x000048: u'SEIKO EPSON CORPORATION',
+   0x000049: u'APRICOT COMPUTERS, LTD',
+   0x00004A: u'ADC CODENOLL TECHNOLOGY CORP.',
+   0x00004B: u'ICL DATA OY',
+   0x00004C: u'NEC CORPORATION',
+   0x00004D: u'DCI CORPORATION',
+   0x00004E: u'AMPEX CORPORATION',
+   0x00004F: u'LOGICRAFT, INC.',
+   0x000050: u'RADISYS CORPORATION',
+   0x000051: u'HOB ELECTRONIC GMBH & CO. KG',
+   0x000052: u'Intrusion.com, Inc.',
+   0x000053: u'COMPUCORP',
+   0x000054: u'MODICON, INC.',
+   0x000055: u'COMMISSARIAT A L`ENERGIE ATOM.',
+   0x000056: u'DR. B. STRUCK',
+   0x000057: u'SCITEX CORPORATION LTD.',
+   0x000058: u'RACORE COMPUTER PRODUCTS INC.',
+   0x000059: u'HELLIGE GMBH',
+   0x00005A: u'SysKonnect GmbH',
+   0x00005B: u'ELTEC ELEKTRONIK AG',
+   0x00005C: u'TELEMATICS INTERNATIONAL INC.',
+   0x00005D: u'CS TELECOM',
+   0x00005E: u'USC INFORMATION SCIENCES INST',
+   0x00005F: u'SUMITOMO ELECTRIC IND., LTD.',
+   0x000060: u'KONTRON ELEKTRONIK GMBH',
+   0x000061: u'GATEWAY COMMUNICATIONS',
+   0x000062: u'BULL HN INFORMATION SYSTEMS',
+   0x000063: u'BARCO CONTROL ROOMS GMBH',
+   0x000064: u'YOKOGAWA DIGITAL COMPUTER CORP',
+   0x000065: u'Network General Corporation',
+   0x000066: u'TALARIS SYSTEMS, INC.',
+   0x000067: u'SOFT * RITE, INC.',
+   0x000068: u'ROSEMOUNT CONTROLS',
+   0x000069: u'CONCORD COMMUNICATIONS INC',
+   0x00006A: u'COMPUTER CONSOLES INC.',
+   0x00006B: u'SILICON GRAPHICS INC./MIPS',
+   0x00006C: u'PRIVATE',
+   0x00006D: u'CRAY COMMUNICATIONS, LTD.',
+   0x00006E: u'ARTISOFT, INC.',
+   0x00006F: u'Madge Ltd.',
+   0x000070: u'HCL LIMITED',
+   0x000071: u'ADRA SYSTEMS INC.',
+   0x000072: u'MINIWARE TECHNOLOGY',
+   0x000073: u'SIECOR CORPORATION',
+   0x000074: u'RICOH COMPANY LTD.',
+   0x000075: u'Nortel Networks',
+   0x000076: u'ABEKAS VIDEO SYSTEM',
+   0x000077: u'INTERPHASE CORPORATION',
+   0x000078: u'LABTAM LIMITED',
+   0x000079: u'NETWORTH INCORPORATED',
+   0x00007A: u'DANA COMPUTER INC.',
+   0x00007B: u'RESEARCH MACHINES',
+   0x00007C: u'AMPERE INCORPORATED',
+   0x00007D: u'SUN MICROSYSTEMS, INC.',
+   0x00007E: u'CLUSTRIX CORPORATION',
+   0x00007F: u'LINOTYPE-HELL AG',
+   0x000080: u'CRAY COMMUNICATIONS A/S',
+   0x000081: u'BAY NETWORKS',
+   0x000082: u'LECTRA SYSTEMES SA',
+   0x000083: u'TADPOLE TECHNOLOGY PLC',
+   0x000084: u'SUPERNET',
+   0x000085: u'CANON INC.',
+   0x000086: u'MEGAHERTZ CORPORATION',
+   0x000087: u'HITACHI, LTD.',
+   0x000088: u'COMPUTER NETWORK TECH. CORP.',
+   0x000089: u'CAYMAN SYSTEMS INC.',
+   0x00008A: u'DATAHOUSE INFORMATION SYSTEMS',
+   0x00008B: u'INFOTRON',
+   0x00008C: u'Alloy Computer Products (Australia) Pty Ltd',
+   0x00008D: u'VERDIX CORPORATION',
+   0x00008E: u'SOLBOURNE COMPUTER, INC.',
+   0x00008F: u'RAYTHEON COMPANY',
+   0x000090: u'MICROCOM',
+   0x000091: u'ANRITSU CORPORATION',
+   0x000092: u'COGENT DATA TECHNOLOGIES',
+   0x000093: u'PROTEON INC.',
+   0x000094: u'ASANTE TECHNOLOGIES',
+   0x000095: u'SONY TEKTRONIX CORP.',
+   0x000096: u'MARCONI ELECTRONICS LTD.',
+   0x000097: u'EPOCH SYSTEMS',
+   0x000098: u'CROSSCOMM CORPORATION',
+   0x000099: u'MTX, INC.',
+   0x00009A: u'RC COMPUTER A/S',
+   0x00009B: u'INFORMATION INTERNATIONAL, INC',
+   0x00009C: u'ROLM MIL-SPEC COMPUTERS',
+   0x00009D: u'LOCUS COMPUTING CORPORATION',
+   0x00009E: u'MARLI S.A.',
+   0x00009F: u'AMERISTAR TECHNOLOGIES INC.',
+   0x0000A0: u'SANYO Electric Co., Ltd.',
+   0x0000A1: u'MARQUETTE ELECTRIC CO.',
+   0x0000A2: u'BAY NETWORKS',
+   0x0000A3: u'NETWORK APPLICATION TECHNOLOGY',
+   0x0000A4: u'ACORN COMPUTERS LIMITED',
+   0x0000A5: u'COMPATIBLE SYSTEMS CORP.',
+   0x0000A6: u'NETWORK GENERAL CORPORATION',
+   0x0000A7: u'NETWORK COMPUTING DEVICES INC.',
+   0x0000A8: u'STRATUS COMPUTER INC.',
+   0x0000A9: u'NETWORK SYSTEMS CORP.',
+   0x0000AA: u'XEROX CORPORATION',
+   0x0000AB: u'LOGIC MODELING CORPORATION',
+   0x0000AC: u'CONWARE COMPUTER CONSULTING',
+   0x0000AD: u'BRUKER INSTRUMENTS INC.',
+   0x0000AE: u'DASSAULT ELECTRONIQUE',
+   0x0000AF: u'NUCLEAR DATA INSTRUMENTATION',
+   0x0000B0: u'RND-RAD NETWORK DEVICES',
+   0x0000B1: u'ALPHA MICROSYSTEMS INC.',
+   0x0000B2: u'TELEVIDEO SYSTEMS, INC.',
+   0x0000B3: u'CIMLINC INCORPORATED',
+   0x0000B4: u'EDIMAX COMPUTER COMPANY',
+   0x0000B5: u'DATABILITY SOFTWARE SYS. INC.',
+   0x0000B6: u'MICRO-MATIC RESEARCH',
+   0x0000B7: u'DOVE COMPUTER CORPORATION',
+   0x0000B8: u'SEIKOSHA CO., LTD.',
+   0x0000B9: u'MCDONNELL DOUGLAS COMPUTER SYS',
+   0x0000BA: u'SIIG, INC.',
+   0x0000BB: u'TRI-DATA',
+   0x0000BC: u'ALLEN-BRADLEY CO. INC.',
+   0x0000BD: u'MITSUBISHI CABLE COMPANY',
+   0x0000BE: u'THE NTI GROUP',
+   0x0000BF: u'SYMMETRIC COMPUTER SYSTEMS',
+   0x0000C0: u'WESTERN DIGITAL CORPORATION',
+   0x0000C1: u'Madge Ltd.',
+   0x0000C2: u'INFORMATION PRESENTATION TECH.',
+   0x0000C3: u'HARRIS CORP COMPUTER SYS DIV',
+   0x0000C4: u'WATERS DIV. OF MILLIPORE',
+   0x0000C5: u'FARALLON COMPUTING/NETOPIA',
+   0x0000C6: u'EON SYSTEMS',
+   0x0000C7: u'ARIX CORPORATION',
+   0x0000C8: u'ALTOS COMPUTER SYSTEMS',
+   0x0000C9: u'EMULEX CORPORATION',
+   0x0000CA: u'ARRIS International',
+   0x0000CB: u'COMPU-SHACK ELECTRONIC GMBH',
+   0x0000CC: u'DENSAN CO., LTD.',
+   0x0000CD: u'Allied Telesyn Research Ltd.',
+   0x0000CE: u'MEGADATA CORP.',
+   0x0000CF: u'HAYES MICROCOMPUTER PRODUCTS',
+   0x0000D0: u'DEVELCON ELECTRONICS LTD.',
+   0x0000D1: u'ADAPTEC INCORPORATED',
+   0x0000D2: u'SBE, INC.',
+   0x0000D3: u'WANG LABORATORIES INC.',
+   0x0000D4: u'PURE DATA LTD.',
+   0x0000D5: u'MICROGNOSIS INTERNATIONAL',
+   0x0000D6: u'PUNCH LINE HOLDING',
+   0x0000D7: u'DARTMOUTH COLLEGE',
+   0x0000D8: u'NOVELL, INC.',
+   0x0000D9: u'NIPPON TELEGRAPH & TELEPHONE',
+   0x0000DA: u'ATEX',
+   0x0000DB: u'BRITISH TELECOMMUNICATIONS PLC',
+   0x0000DC: u'HAYES MICROCOMPUTER PRODUCTS',
+   0x0000DD: u'TCL INCORPORATED',
+   0x0000DE: u'CETIA',
+   0x0000DF: u'BELL & HOWELL PUB SYS DIV',
+   0x0000E0: u'QUADRAM CORP.',
+   0x0000E1: u'GRID SYSTEMS',
+   0x0000E2: u'ACER TECHNOLOGIES CORP.',
+   0x0000E3: u'INTEGRATED MICRO PRODUCTS LTD',
+   0x0000E4: u'IN2 GROUPE INTERTECHNIQUE',
+   0x0000E5: u'SIGMEX LTD.',
+   0x0000E6: u'APTOR PRODUITS DE COMM INDUST',
+   0x0000E7: u'STAR GATE TECHNOLOGIES',
+   0x0000E8: u'ACCTON TECHNOLOGY CORP.',
+   0x0000E9: u'ISICAD, INC.',
+   0x0000EA: u'UPNOD AB',
+   0x0000EB: u'MATSUSHITA COMM. IND. CO. LTD.',
+   0x0000EC: u'MICROPROCESS',
+   0x0000ED: u'APRIL',
+   0x0000EE: u'NETWORK DESIGNERS, LTD.',
+   0x0000EF: u'KTI',
+   0x0000F0: u'SAMSUNG ELECTRONICS CO., LTD.',
+   0x0000F1: u'MAGNA COMPUTER CORPORATION',
+   0x0000F2: u'SPIDER COMMUNICATIONS',
+   0x0000F3: u'GANDALF DATA LIMITED',
+   0x0000F4: u'ALLIED TELESYN INTERNATIONAL',
+   0x0000F5: u'DIAMOND SALES LIMITED',
+   0x0000F6: u'APPLIED MICROSYSTEMS CORP.',
+   0x0000F7: u'YOUTH KEEP ENTERPRISE CO LTD',
+   0x0000F8: u'DIGITAL EQUIPMENT CORPORATION',
+   0x0000F9: u'QUOTRON SYSTEMS INC.',
+   0x0000FA: u'MICROSAGE COMPUTER SYSTEMS INC',
+   0x0000FB: u'RECHNER ZUR KOMMUNIKATION',
+   0x0000FC: u'MEIKO',
+   0x0000FD: u'HIGH LEVEL HARDWARE',
+   0x0000FE: u'ANNAPOLIS MICRO SYSTEMS',
+   0x0000FF: u'CAMTEC ELECTRONICS LTD.',
+   0x000100: u'EQUIP\'TRANS',
+   0x000101: u'PRIVATE',
+   0x000102: u'3COM CORPORATION',
+   0x000103: u'3COM CORPORATION',
+   0x000104: u'DVICO Co., Ltd.',
+   0x000105: u'BECKHOFF GmbH',
+   0x000106: u'Tews Datentechnik GmbH',
+   0x000107: u'Leiser GmbH',
+   0x000108: u'AVLAB Technology, Inc.',
+   0x000109: u'Nagano Japan Radio Co., Ltd.',
+   0x00010A: u'CIS TECHNOLOGY INC.',
+   0x00010B: u'Space CyberLink, Inc.',
+   0x00010C: u'System Talks Inc.',
+   0x00010D: u'CORECO, INC.',
+   0x00010E: u'Bri-Link Technologies Co., Ltd',
+   0x00010F: u'McDATA Corporation',
+   0x000110: u'Gotham Networks',
+   0x000111: u'iDigm Inc.',
+   0x000112: u'Shark Multimedia Inc.',
+   0x000113: u'OLYMPUS CORPORATION',
+   0x000114: u'KANDA TSUSHIN KOGYO CO., LTD.',
+   0x000115: u'EXTRATECH CORPORATION',
+   0x000116: u'Netspect Technologies, Inc.',
+   0x000117: u'CANAL +',
+   0x000118: u'EZ Digital Co., Ltd.',
+   0x000119: u'RTUnet (Australia)',
+   0x00011A: u'EEH DataLink GmbH',
+   0x00011B: u'Unizone Technologies, Inc.',
+   0x00011C: u'Universal Talkware Corporation',
+   0x00011D: u'Centillium Communications',
+   0x00011E: u'Precidia Technologies, Inc.',
+   0x00011F: u'RC Networks, Inc.',
+   0x000120: u'OSCILLOQUARTZ S.A.',
+   0x000121: u'Watchguard Technologies, Inc.',
+   0x000122: u'Trend Communications, Ltd.',
+   0x000123: u'DIGITAL ELECTRONICS CORP.',
+   0x000124: u'Acer Incorporated',
+   0x000125: u'YAESU MUSEN CO., LTD.',
+   0x000126: u'PAC Labs',
+   0x000127: u'OPEN Networks Pty Ltd',
+   0x000128: u'EnjoyWeb, Inc.',
+   0x000129: u'DFI Inc.',
+   0x00012A: u'Telematica Sistems Inteligente',
+   0x00012B: u'TELENET Co., Ltd.',
+   0x00012C: u'Aravox Technologies, Inc.',
+   0x00012D: u'Komodo Technology',
+   0x00012E: u'PC Partner Ltd.',
+   0x00012F: u'Twinhead International Corp',
+   0x000130: u'Extreme Networks',
+   0x000131: u'Detection Systems, Inc.',
+   0x000132: u'Dranetz - BMI',
+   0x000133: u'KYOWA Electronic Instruments C',
+   0x000134: u'SIG Positec Systems AG',
+   0x000135: u'KDC Corp.',
+   0x000136: u'CyberTAN Technology, Inc.',
+   0x000137: u'IT Farm Corporation',
+   0x000138: u'XAVi Technologies Corp.',
+   0x000139: u'Point Multimedia Systems',
+   0x00013A: u'SHELCAD COMMUNICATIONS, LTD.',
+   0x00013B: u'BNA SYSTEMS',
+   0x00013C: u'TIW SYSTEMS',
+   0x00013D: u'RiscStation Ltd.',
+   0x00013E: u'Ascom Tateco AB',
+   0x00013F: u'Neighbor World Co., Ltd.',
+   0x000140: u'Sendtek Corporation',
+   0x000141: u'CABLE PRINT',
+   0x000142: u'Cisco Systems, Inc.',
+   0x000143: u'Cisco Systems, Inc.',
+   0x000144: u'EMC Corporation',
+   0x000145: u'WINSYSTEMS, INC.',
+   0x000146: u'Tesco Controls, Inc.',
+   0x000147: u'Zhone Technologies',
+   0x000148: u'X-traWeb Inc.',
+   0x000149: u'T.D.T. Transfer Data Test GmbH',
+   0x00014A: u'Sony Corporation',
+   0x00014B: u'Ennovate Networks, Inc.',
+   0x00014C: u'Berkeley Process Control',
+   0x00014D: u'Shin Kin Enterprises Co., Ltd',
+   0x00014E: u'WIN Enterprises, Inc.',
+   0x00014F: u'ADTRAN INC',
+   0x000150: u'GILAT COMMUNICATIONS, LTD.',
+   0x000151: u'Ensemble Communications',
+   0x000152: u'CHROMATEK INC.',
+   0x000153: u'ARCHTEK TELECOM CORPORATION',
+   0x000154: u'G3M Corporation',
+   0x000155: u'Promise Technology, Inc.',
+   0x000156: u'FIREWIREDIRECT.COM, INC.',
+   0x000157: u'SYSWAVE CO., LTD',
+   0x000158: u'Electro Industries/Gauge Tech',
+   0x000159: u'S1 Corporation',
+   0x00015A: u'Digital Video Broadcasting',
+   0x00015B: u'ITALTEL S.p.A/RF-UP-I',
+   0x00015C: u'CADANT INC.',
+   0x00015D: u'Sun Microsystems, Inc',
+   0x00015E: u'BEST TECHNOLOGY CO., LTD.',
+   0x00015F: u'DIGITAL DESIGN GmbH',
+   0x000160: u'ELMEX Co., LTD.',
+   0x000161: u'Meta Machine Technology',
+   0x000162: u'Cygnet Technologies, Inc.',
+   0x000163: u'Cisco Systems, Inc.',
+   0x000164: u'Cisco Systems, Inc.',
+   0x000165: u'AirSwitch Corporation',
+   0x000166: u'TC GROUP A/S',
+   0x000167: u'HIOKI E.E. CORPORATION',
+   0x000168: u'VITANA CORPORATION',
+   0x000169: u'Celestix Networks Pte Ltd.',
+   0x00016A: u'ALITEC',
+   0x00016B: u'LightChip, Inc.',
+   0x00016C: u'FOXCONN',
+   0x00016D: u'CarrierComm Inc.',
+   0x00016E: u'Conklin Corporation',
+   0x00016F: u'HAITAI ELECTRONICS CO., LTD.',
+   0x000170: u'ESE Embedded System Engineer\'g',
+   0x000171: u'Allied Data Technologies',
+   0x000172: u'TechnoLand Co., LTD.',
+   0x000173: u'AMCC',
+   0x000174: u'CyberOptics Corporation',
+   0x000175: u'Radiant Communications Corp.',
+   0x000176: u'Orient Silver Enterprises',
+   0x000177: u'EDSL',
+   0x000178: u'MARGI Systems, Inc.',
+   0x000179: u'WIRELESS TECHNOLOGY, INC.',
+   0x00017A: u'Chengdu Maipu Electric Industrial Co., Ltd.',
+   0x00017B: u'Heidelberger Druckmaschinen AG',
+   0x00017C: u'AG-E GmbH',
+   0x00017D: u'ThermoQuest',
+   0x00017E: u'ADTEK System Science Co., Ltd.',
+   0x00017F: u'Experience Music Project',
+   0x000180: u'AOpen, Inc.',
+   0x000181: u'Nortel Networks',
+   0x000182: u'DICA TECHNOLOGIES AG',
+   0x000183: u'ANITE TELECOMS',
+   0x000184: u'SIEB & MEYER AG',
+   0x000185: u'Aloka Co., Ltd.',
+   0x000186: u'Uwe Disch',
+   0x000187: u'i2SE GmbH',
+   0x000188: u'LXCO Technologies ag',
+   0x000189: u'Refraction Technology, Inc.',
+   0x00018A: u'ROI COMPUTER AG',
+   0x00018B: u'NetLinks Co., Ltd.',
+   0x00018C: u'Mega Vision',
+   0x00018D: u'AudeSi Technologies',
+   0x00018E: u'Logitec Corporation',
+   0x00018F: u'Kenetec, Inc.',
+   0x000190: u'SMK-M',
+   0x000191: u'SYRED Data Systems',
+   0x000192: u'Texas Digital Systems',
+   0x000193: u'Hanbyul Telecom Co., Ltd.',
+   0x000194: u'Capital Equipment Corporation',
+   0x000195: u'Sena Technologies, Inc.',
+   0x000196: u'Cisco Systems, Inc.',
+   0x000197: u'Cisco Systems, Inc.',
+   0x000198: u'Darim Vision',
+   0x000199: u'HeiSei Electronics',
+   0x00019A: u'LEUNIG GmbH',
+   0x00019B: u'Kyoto Microcomputer Co., Ltd.',
+   0x00019C: u'JDS Uniphase Inc.',
+   0x00019D: u'E-Control Systems, Inc.',
+   0x00019E: u'ESS Technology, Inc.',
+   0x00019F: u'Phonex Broadband',
+   0x0001A0: u'Infinilink Corporation',
+   0x0001A1: u'Mag-Tek, Inc.',
+   0x0001A2: u'Logical Co., Ltd.',
+   0x0001A3: u'GENESYS LOGIC, INC.',
+   0x0001A4: u'Microlink Corporation',
+   0x0001A5: u'Nextcomm, Inc.',
+   0x0001A6: u'Scientific-Atlanta Arcodan A/S',
+   0x0001A7: u'UNEX TECHNOLOGY CORPORATION',
+   0x0001A8: u'Welltech Computer Co., Ltd.',
+   0x0001A9: u'BMW AG',
+   0x0001AA: u'Airspan Communications, Ltd.',
+   0x0001AB: u'Main Street Networks',
+   0x0001AC: u'Sitara Networks, Inc.',
+   0x0001AD: u'Coach Master International  d.b.a. CMI Worldwide, Inc.',
+   0x0001AE: u'Trex Enterprises',
+   0x0001AF: u'Motorola Computer Group',
+   0x0001B0: u'Fulltek Technology Co., Ltd.',
+   0x0001B1: u'General Bandwidth',
+   0x0001B2: u'Digital Processing Systems, Inc.',
+   0x0001B3: u'Precision Electronic Manufacturing',
+   0x0001B4: u'Wayport, Inc.',
+   0x0001B5: u'Turin Networks, Inc.',
+   0x0001B6: u'SAEJIN T&M Co., Ltd.',
+   0x0001B7: u'Centos, Inc.',
+   0x0001B8: u'Netsensity, Inc.',
+   0x0001B9: u'SKF Condition Monitoring',
+   0x0001BA: u'IC-Net, Inc.',
+   0x0001BB: u'Frequentis',
+   0x0001BC: u'Brains Corporation',
+   0x0001BD: u'Peterson Electro-Musical Products, Inc.',
+   0x0001BE: u'Gigalink Co., Ltd.',
+   0x0001BF: u'Teleforce Co., Ltd.',
+   0x0001C0: u'CompuLab, Ltd.',
+   0x0001C1: u'Vitesse Semiconductor Corporation',
+   0x0001C2: u'ARK Research Corp.',
+   0x0001C3: u'Acromag, Inc.',
+   0x0001C4: u'NeoWave, Inc.',
+   0x0001C5: u'Simpler Networks',
+   0x0001C6: u'Quarry Technologies',
+   0x0001C7: u'Cisco Systems, Inc.',
+   # NOTE: the IEEE registry lists OUI 00:01:C8 twice ('THOMAS CONRAD CORP.' and 'CONRAD CORP.'); the duplicate dict key was shadowed (last wins), so only the surviving entry is kept:
+   0x0001C8: u'CONRAD CORP.',
+   0x0001C9: u'Cisco Systems, Inc.',
+   0x0001CA: u'Geocast Network Systems, Inc.',
+   0x0001CB: u'EVR',
+   0x0001CC: u'Japan Total Design Communication Co., Ltd.',
+   0x0001CD: u'ARtem',
+   0x0001CE: u'Custom Micro Products, Ltd.',
+   0x0001CF: u'Alpha Data Parallel Systems, Ltd.',
+   0x0001D0: u'VitalPoint, Inc.',
+   0x0001D1: u'CoNet Communications, Inc.',
+   0x0001D2: u'MacPower Peripherals, Ltd.',
+   0x0001D3: u'PAXCOMM, Inc.',
+   0x0001D4: u'Leisure Time, Inc.',
+   0x0001D5: u'HAEDONG INFO & COMM CO., LTD',
+   0x0001D6: u'MAN Roland Druckmaschinen AG',
+   0x0001D7: u'F5 Networks, Inc.',
+   0x0001D8: u'Teltronics, Inc.',
+   0x0001D9: u'Sigma, Inc.',
+   0x0001DA: u'WINCOMM Corporation',
+   0x0001DB: u'Freecom Technologies GmbH',
+   0x0001DC: u'Activetelco',
+   0x0001DD: u'Avail Networks',
+   0x0001DE: u'Trango Systems, Inc.',
+   0x0001DF: u'ISDN Communications, Ltd.',
+   0x0001E0: u'Fast Systems, Inc.',
+   0x0001E1: u'Kinpo Electronics, Inc.',
+   0x0001E2: u'Ando Electric Corporation',
+   0x0001E3: u'Siemens AG',
+   0x0001E4: u'Sitera, Inc.',
+   0x0001E5: u'Supernet, Inc.',
+   0x0001E6: u'Hewlett-Packard Company',
+   0x0001E7: u'Hewlett-Packard Company',
+   0x0001E8: u'Force10 Networks, Inc.',
+   0x0001E9: u'Litton Marine Systems B.V.',
+   0x0001EA: u'Cirilium Corp.',
+   0x0001EB: u'C-COM Corporation',
+   0x0001EC: u'Ericsson Group',
+   0x0001ED: u'SETA Corp.',
+   0x0001EE: u'Comtrol Europe, Ltd.',
+   0x0001EF: u'Camtel Technology Corp.',
+   0x0001F0: u'Tridium, Inc.',
+   0x0001F1: u'Innovative Concepts, Inc.',
+   0x0001F2: u'Mark of the Unicorn, Inc.',
+   0x0001F3: u'QPS, Inc.',
+   0x0001F4: u'Enterasys Networks',
+   0x0001F5: u'ERIM S.A.',
+   0x0001F6: u'Association of Musical Electronics Industry',
+   0x0001F7: u'Image Display Systems, Inc.',
+   0x0001F8: u'Adherent Systems, Ltd.',
+   0x0001F9: u'TeraGlobal Communications Corp.',
+   0x0001FA: u'HOROSCAS',
+   0x0001FB: u'DoTop Technology, Inc.',
+   0x0001FC: u'Keyence Corporation',
+   0x0001FD: u'Digital Voice Systems, Inc.',
+   0x0001FE: u'DIGITAL EQUIPMENT CORPORATION',
+   0x0001FF: u'Data Direct Networks, Inc.',
+   0x000200: u'Net & Sys Co., Ltd.',
+   0x000201: u'IFM Electronic gmbh',
+   0x000202: u'Amino Communications, Ltd.',
+   0x000203: u'Woonsang Telecom, Inc.',
+   0x000204: u'Bodmann Industries Elektronik GmbH',
+   0x000205: u'Hitachi Denshi, Ltd.',
+   0x000206: u'Telital R&D Denmark A/S',
+   0x000207: u'VisionGlobal Network Corp.',
+   0x000208: u'Unify Networks, Inc.',
+   0x000209: u'Shenzhen SED Information Technology Co., Ltd.',
+   0x00020A: u'Gefran Spa',
+   0x00020B: u'Native Networks, Inc.',
+   0x00020C: u'Metro-Optix',
+   0x00020D: u'Micronpc.com',
+   0x00020E: u'Laurel Networks, Inc.',
+   0x00020F: u'AATR',
+   0x000210: u'Fenecom',
+   0x000211: u'Nature Worldwide Technology Corp.',
+   0x000212: u'SierraCom',
+   0x000213: u'S.D.E.L.',
+   0x000214: u'DTVRO',
+   0x000215: u'Cotas Computer Technology A/B',
+   0x000216: u'Cisco Systems, Inc.',
+   0x000217: u'Cisco Systems, Inc.',
+   0x000218: u'Advanced Scientific Corp',
+   0x000219: u'Paralon Technologies',
+   0x00021A: u'Zuma Networks',
+   0x00021B: u'Kollmorgen-Servotronix',
+   0x00021C: u'Network Elements, Inc.',
+   0x00021D: u'Data General Communication Ltd.',
+   0x00021E: u'SIMTEL S.R.L.',
+   0x00021F: u'Aculab PLC',
+   0x000220: u'Canon Aptex, Inc.',
+   0x000221: u'DSP Application, Ltd.',
+   0x000222: u'Chromisys, Inc.',
+   0x000223: u'ClickTV',
+   0x000224: u'C-COR',
+   0x000225: u'Certus Technology, Inc.',
+   0x000226: u'XESystems, Inc.',
+   0x000227: u'ESD GmbH',
+   0x000228: u'Necsom, Ltd.',
+   0x000229: u'Adtec Corporation',
+   0x00022A: u'Asound Electronic',
+   0x00022B: u'SAXA, Inc.',
+   0x00022C: u'ABB Bomem, Inc.',
+   0x00022D: u'Agere Systems',
+   0x00022E: u'TEAC Corp. R& D',
+   0x00022F: u'P-Cube, Ltd.',
+   0x000230: u'Intersoft Electronics',
+   0x000231: u'Ingersoll-Rand',
+   0x000232: u'Avision, Inc.',
+   0x000233: u'Mantra Communications, Inc.',
+   0x000234: u'Imperial Technology, Inc.',
+   0x000235: u'Paragon Networks International',
+   0x000236: u'INIT GmbH',
+   0x000237: u'Cosmo Research Corp.',
+   0x000238: u'Serome Technology, Inc.',
+   0x000239: u'Visicom',
+   0x00023A: u'ZSK Stickmaschinen GmbH',
+   0x00023B: u'Redback Networks',
+   0x00023C: u'Creative Technology, Ltd.',
+   0x00023D: u'NuSpeed, Inc.',
+   0x00023E: u'Selta Telematica S.p.a',
+   0x00023F: u'Compal Electronics, Inc.',
+   0x000240: u'Seedek Co., Ltd.',
+   0x000241: u'Amer.com',
+   0x000242: u'Videoframe Systems',
+   0x000243: u'Raysis Co., Ltd.',
+   0x000244: u'SURECOM Technology Co.',
+   0x000245: u'Lampus Co, Ltd.',
+   0x000246: u'All-Win Tech Co., Ltd.',
+   0x000247: u'Great Dragon Information Technology (Group) Co., Ltd.',
+   0x000248: u'Pilz GmbH & Co.',
+   0x000249: u'Aviv Infocom Co, Ltd.',
+   0x00024A: u'Cisco Systems, Inc.',
+   0x00024B: u'Cisco Systems, Inc.',
+   0x00024C: u'SiByte, Inc.',
+   0x00024D: u'Mannesman Dematic Colby Pty. Ltd.',
+   0x00024E: u'Datacard Group',
+   0x00024F: u'IPM Datacom S.R.L.',
+   0x000250: u'Geyser Networks, Inc.',
+   0x000251: u'Soma Networks, Inc.',
+   0x000252: u'Carrier Corporation',
+   0x000253: u'Televideo, Inc.',
+   0x000254: u'WorldGate',
+   0x000255: u'IBM Corporation',
+   0x000256: u'Alpha Processor, Inc.',
+   0x000257: u'Microcom Corp.',
+   0x000258: u'Flying Packets Communications',
+   0x000259: u'Tsann Kuen China (Shanghai)Enterprise Co., Ltd. IT Group',
+   0x00025A: u'Catena Networks',
+   0x00025B: u'Cambridge Silicon Radio',
+   0x00025C: u'SCI Systems (Kunshan) Co., Ltd.',
+   0x00025D: u'Calix Networks',
+   0x00025E: u'High Technology Ltd',
+   0x00025F: u'Nortel Networks',
+   0x000260: u'Accordion Networks, Inc.',
+   0x000261: u'Tilgin AB',
+   0x000262: u'Soyo Group Soyo Com Tech Co., Ltd',
+   0x000263: u'UPS Manufacturing SRL',
+   0x000264: u'AudioRamp.com',
+   0x000265: u'Virditech Co. Ltd.',
+   0x000266: u'Thermalogic Corporation',
+   0x000267: u'NODE RUNNER, INC.',
+   0x000268: u'Harris Government Communications',
+   0x000269: u'Nadatel Co., Ltd',
+   0x00026A: u'Cocess Telecom Co., Ltd.',
+   0x00026B: u'BCM Computers Co., Ltd.',
+   0x00026C: u'Philips CFT',
+   0x00026D: u'Adept Telecom',
+   0x00026E: u'NeGeN Access, Inc.',
+   0x00026F: u'Senao International Co., Ltd.',
+   0x000270: u'Crewave Co., Ltd.',
+   0x000271: u'Vpacket Communications',
+   0x000272: u'CC&C Technologies, Inc.',
+   0x000273: u'Coriolis Networks',
+   0x000274: u'Tommy Technologies Corp.',
+   0x000275: u'SMART Technologies, Inc.',
+   0x000276: u'Primax Electronics Ltd.',
+   0x000277: u'Cash Systemes Industrie',
+   0x000278: u'Samsung Electro-Mechanics Co., Ltd.',
+   0x000279: u'Control Applications, Ltd.',
+   0x00027A: u'IOI Technology Corporation',
+   0x00027B: u'Amplify Net, Inc.',
+   0x00027C: u'Trilithic, Inc.',
+   0x00027D: u'Cisco Systems, Inc.',
+   0x00027E: u'Cisco Systems, Inc.',
+   0x00027F: u'ask-technologies.com',
+   0x000280: u'Mu Net, Inc.',
+   0x000281: u'Madge Ltd.',
+   0x000282: u'ViaClix, Inc.',
+   0x000283: u'Spectrum Controls, Inc.',
+   0x000284: u'AREVA T&D',
+   0x000285: u'Riverstone Networks',
+   0x000286: u'Occam Networks',
+   0x000287: u'Adapcom',
+   0x000288: u'GLOBAL VILLAGE COMMUNICATION',
+   0x000289: u'DNE Technologies',
+   0x00028A: u'Ambit Microsystems Corporation',
+   0x00028B: u'VDSL Systems OY',
+   0x00028C: u'Micrel-Synergy Semiconductor',
+   0x00028D: u'Movita Technologies, Inc.',
+   0x00028E: u'Rapid 5 Networks, Inc.',
+   0x00028F: u'Globetek, Inc.',
+   0x000290: u'Woorigisool, Inc.',
+   0x000291: u'Open Network Co., Ltd.',
+   0x000292: u'Logic Innovations, Inc.',
+   0x000293: u'Solid Data Systems',
+   0x000294: u'Tokyo Sokushin Co., Ltd.',
+   0x000295: u'IP.Access Limited',
+   0x000296: u'Lectron Co,. Ltd.',
+   0x000297: u'C-COR.net',
+   0x000298: u'Broadframe Corporation',
+   0x000299: u'Apex, Inc.',
+   0x00029A: u'Storage Apps',
+   0x00029B: u'Kreatel Communications AB',
+   0x00029C: u'3COM',
+   0x00029D: u'Merix Corp.',
+   0x00029E: u'Information Equipment Co., Ltd.',
+   0x00029F: u'L-3 Communication Aviation Recorders',
+   0x0002A0: u'Flatstack Ltd.',
+   0x0002A1: u'World Wide Packets',
+   0x0002A2: u'Hilscher GmbH',
+   0x0002A3: u'ABB Power Automation',
+   0x0002A4: u'AddPac Technology Co., Ltd.',
+   0x0002A5: u'Compaq Computer Corporation',
+   0x0002A6: u'Effinet Systems Co., Ltd.',
+   0x0002A7: u'Vivace Networks',
+   0x0002A8: u'Air Link Technology',
+   0x0002A9: u'RACOM, s.r.o.',
+   0x0002AA: u'PLcom Co., Ltd.',
+   0x0002AB: u'CTC Union Technologies Co., Ltd.',
+   0x0002AC: u'3PAR data',
+   0x0002AD: u'Pentax Corpotation',
+   0x0002AE: u'Scannex Electronics Ltd.',
+   0x0002AF: u'TeleCruz Technology, Inc.',
+   0x0002B0: u'Hokubu Communication & Industrial Co., Ltd.',
+   0x0002B1: u'Anritsu, Ltd.',
+   0x0002B2: u'Cablevision',
+   0x0002B3: u'Intel Corporation',
+   0x0002B4: u'DAPHNE',
+   0x0002B5: u'Avnet, Inc.',
+   0x0002B6: u'Acrosser Technology Co., Ltd.',
+   0x0002B7: u'Watanabe Electric Industry Co., Ltd.',
+   0x0002B8: u'WHI KONSULT AB',
+   0x0002B9: u'Cisco Systems, Inc.',
+   0x0002BA: u'Cisco Systems, Inc.',
+   0x0002BB: u'Continuous Computing',
+   0x0002BC: u'LVL 7 Systems, Inc.',
+   0x0002BD: u'Bionet Co., Ltd.',
+   0x0002BE: u'Totsu Engineering, Inc.',
+   0x0002BF: u'dotRocket, Inc.',
+   0x0002C0: u'Bencent Tzeng Industry Co., Ltd.',
+   0x0002C1: u'Innovative Electronic Designs, Inc.',
+   0x0002C2: u'Net Vision Telecom',
+   0x0002C3: u'Arelnet Ltd.',
+   0x0002C4: u'Vector International BUBA',
+   0x0002C5: u'Evertz Microsystems Ltd.',
+   0x0002C6: u'Data Track Technology PLC',
+   0x0002C7: u'ALPS ELECTRIC Co., Ltd.',
+   0x0002C8: u'Technocom Communications Technology (pte) Ltd',
+   0x0002C9: u'Mellanox Technologies',
+   0x0002CA: u'EndPoints, Inc.',
+   0x0002CB: u'TriState Ltd.',
+   0x0002CC: u'M.C.C.I',
+   0x0002CD: u'TeleDream, Inc.',
+   0x0002CE: u'FoxJet, Inc.',
+   0x0002CF: u'ZyGate Communications, Inc.',
+   0x0002D0: u'Comdial Corporation',
+   0x0002D1: u'Vivotek, Inc.',
+   0x0002D2: u'Workstation AG',
+   0x0002D3: u'NetBotz, Inc.',
+   0x0002D4: u'PDA Peripherals, Inc.',
+   0x0002D5: u'ACR',
+   0x0002D6: u'NICE Systems',
+   0x0002D7: u'EMPEG Ltd',
+   0x0002D8: u'BRECIS Communications Corporation',
+   0x0002D9: u'Reliable Controls',
+   0x0002DA: u'ExiO Communications, Inc.',
+   0x0002DB: u'NETSEC',
+   0x0002DC: u'Fujitsu General Limited',
+   0x0002DD: u'Bromax Communications, Ltd.',
+   0x0002DE: u'Astrodesign, Inc.',
+   0x0002DF: u'Net Com Systems, Inc.',
+   0x0002E0: u'ETAS GmbH',
+   0x0002E1: u'Integrated Network Corporation',
+   0x0002E2: u'NDC Infared Engineering',
+   0x0002E3: u'LITE-ON Communications, Inc.',
+   0x0002E4: u'JC HYUN Systems, Inc.',
+   0x0002E5: u'Timeware Ltd.',
+   0x0002E6: u'Gould Instrument Systems, Inc.',
+   0x0002E7: u'CAB GmbH & Co KG',
+   0x0002E8: u'E.D.&A.',
+   0x0002E9: u'CS Systemes De Securite - C3S',
+   0x0002EA: u'Focus Enhancements',
+   0x0002EB: u'Pico Communications',
+   0x0002EC: u'Maschoff Design Engineering',
+   0x0002ED: u'DXO Telecom Co., Ltd.',
+   0x0002EE: u'Nokia Danmark A/S',
+   0x0002EF: u'CCC Network Systems Group Ltd.',
+   0x0002F0: u'AME Optimedia Technology Co., Ltd.',
+   0x0002F1: u'Pinetron Co., Ltd.',
+   0x0002F2: u'eDevice, Inc.',
+   0x0002F3: u'Media Serve Co., Ltd.',
+   0x0002F4: u'PCTEL, Inc.',
+   0x0002F5: u'VIVE Synergies, Inc.',
+   0x0002F6: u'Equipe Communications',
+   0x0002F7: u'ARM',
+   0x0002F8: u'SEAKR Engineering, Inc.',
+   0x0002F9: u'Mimos Semiconductor SDN BHD',
+   0x0002FA: u'DX Antenna Co., Ltd.',
+   0x0002FB: u'Baumuller Aulugen-Systemtechnik GmbH',
+   0x0002FC: u'Cisco Systems, Inc.',
+   0x0002FD: u'Cisco Systems, Inc.',
+   0x0002FE: u'Viditec, Inc.',
+   0x0002FF: u'Handan BroadInfoCom',
+   0x000300: u'NetContinuum, Inc.',
+   0x000301: u'Avantas Networks Corporation',
+   0x000302: u'Charles Industries, Ltd.',
+   0x000303: u'JAMA Electronics Co., Ltd.',
+   0x000304: u'Pacific Broadband Communications',
+   0x000305: u'Smart Network Devices GmbH',
+   0x000306: u'Fusion In Tech Co., Ltd.',
+   0x000307: u'Secure Works, Inc.',
+   0x000308: u'AM Communications, Inc.',
+   0x000309: u'Texcel Technology PLC',
+   0x00030A: u'Argus Technologies',
+   0x00030B: u'Hunter Technology, Inc.',
+   0x00030C: u'Telesoft Technologies Ltd.',
+   0x00030D: u'Uniwill Computer Corp.',
+   0x00030E: u'Core Communications Co., Ltd.',
+   0x00030F: u'Digital China (Shanghai) Networks Ltd.',
+   0x000310: u'Link Evolution Corp.',
+   0x000311: u'Micro Technology Co., Ltd.',
+   0x000312: u'TR-Systemtechnik GmbH',
+   0x000313: u'Access Media SPA',
+   0x000314: u'Teleware Network Systems',
+   0x000315: u'Cidco Incorporated',
+   0x000316: u'Nobell Communications, Inc.',
+   0x000317: u'Merlin Systems, Inc.',
+   0x000318: u'Cyras Systems, Inc.',
+   0x000319: u'Infineon AG',
+   0x00031A: u'Beijing Broad Telecom Ltd., China',
+   0x00031B: u'Cellvision Systems, Inc.',
+   0x00031C: u'Svenska Hardvarufabriken AB',
+   0x00031D: u'Taiwan Commate Computer, Inc.',
+   0x00031E: u'Optranet, Inc.',
+   0x00031F: u'Condev Ltd.',
+   0x000320: u'Xpeed, Inc.',
+   0x000321: u'Reco Research Co., Ltd.',
+   0x000322: u'IDIS Co., Ltd.',
+   0x000323: u'Cornet Technology, Inc.',
+   0x000324: u'SANYO Multimedia Tottori Co., Ltd.',
+   0x000325: u'Arima Computer Corp.',
+   0x000326: u'Iwasaki Information Systems Co., Ltd.',
+   0x000327: u'ACT\'L',
+   0x000328: u'Mace Group, Inc.',
+   0x000329: u'F3, Inc.',
+   0x00032A: u'UniData Communication Systems, Inc.',
+   0x00032B: u'GAI Datenfunksysteme GmbH',
+   0x00032C: u'ABB Industrie AG',
+   0x00032D: u'IBASE Technology, Inc.',
+   0x00032E: u'Scope Information Management, Ltd.',
+   0x00032F: u'Global Sun Technology, Inc.',
+   0x000330: u'Imagenics, Co., Ltd.',
+   0x000331: u'Cisco Systems, Inc.',
+   0x000332: u'Cisco Systems, Inc.',
+   0x000333: u'Digitel Co., Ltd.',
+   0x000334: u'Newport Electronics',
+   0x000335: u'Mirae Technology',
+   0x000336: u'Zetes Technologies',
+   0x000337: u'Vaone, Inc.',
+   0x000338: u'Oak Technology',
+   0x000339: u'Eurologic Systems, Ltd.',
+   0x00033A: u'Silicon Wave, Inc.',
+   0x00033B: u'TAMI Tech Co., Ltd.',
+   0x00033C: u'Daiden Co., Ltd.',
+   0x00033D: u'ILSHin Lab',
+   0x00033E: u'Tateyama System Laboratory Co., Ltd.',
+   0x00033F: u'BigBand Networks, Ltd.',
+   0x000340: u'Floware Wireless Systems, Ltd.',
+   0x000341: u'Axon Digital Design',
+   0x000342: u'Nortel Networks',
+   0x000343: u'Martin Professional A/S',
+   0x000344: u'Tietech.Co., Ltd.',
+   0x000345: u'Routrek Networks Corporation',
+   0x000346: u'Hitachi Kokusai Electric, Inc.',
+   0x000347: u'Intel Corporation',
+   0x000348: u'Norscan Instruments, Ltd.',
+   0x000349: u'Vidicode Datacommunicatie B.V.',
+   0x00034A: u'RIAS Corporation',
+   0x00034B: u'Nortel Networks',
+   0x00034C: u'Shanghai DigiVision Technology Co., Ltd.',
+   0x00034D: u'Chiaro Networks, Ltd.',
+   0x00034E: u'Pos Data Company, Ltd.',
+   0x00034F: u'Sur-Gard Security',
+   0x000350: u'BTICINO SPA',
+   0x000351: u'Diebold, Inc.',
+   0x000352: u'Colubris Networks',
+   0x000353: u'Mitac, Inc.',
+   0x000354: u'Fiber Logic Communications',
+   0x000355: u'TeraBeam Internet Systems',
+   0x000356: u'Wincor Nixdorf GmbH & Co KG',
+   0x000357: u'Intervoice-Brite, Inc.',
+   0x000358: u'Hanyang Digitech Co., Ltd.',
+   0x000359: u'DigitalSis',
+   0x00035A: u'Photron Limited',
+   0x00035B: u'BridgeWave Communications',
+   0x00035C: u'Saint Song Corp.',
+   0x00035D: u'Bosung Hi-Net Co., Ltd.',
+   0x00035E: u'Metropolitan Area Networks, Inc.',
+   0x00035F: u'Prueftechnik Condition Monitoring GmbH & Co. KG',
+   0x000360: u'PAC Interactive Technology, Inc.',
+   0x000361: u'Widcomm, Inc.',
+   0x000362: u'Vodtel Communications, Inc.',
+   0x000363: u'Miraesys Co., Ltd.',
+   0x000364: u'Scenix Semiconductor, Inc.',
+   0x000365: u'Kira Information & Communications, Ltd.',
+   0x000366: u'ASM Pacific Technology',
+   0x000367: u'Jasmine Networks, Inc.',
+   0x000368: u'Embedone Co., Ltd.',
+   0x000369: u'Nippon Antenna Co., Ltd.',
+   0x00036A: u'Mainnet, Ltd.',
+   0x00036B: u'Cisco Systems, Inc.',
+   0x00036C: u'Cisco Systems, Inc.',
+   0x00036D: u'Runtop, Inc.',
+   0x00036E: u'Nicon Systems (Pty) Limited',
+   0x00036F: u'Telsey SPA',
+   0x000370: u'NXTV, Inc.',
+   0x000371: u'Acomz Networks Corp.',
+   0x000372: u'ULAN',
+   0x000373: u'Aselsan A.S',
+   0x000374: u'Hunter Watertech',
+   0x000375: u'NetMedia, Inc.',
+   0x000376: u'Graphtec Technology, Inc.',
+   0x000377: u'Gigabit Wireless',
+   0x000378: u'HUMAX Co., Ltd.',
+   0x000379: u'Proscend Communications, Inc.',
+   0x00037A: u'Taiyo Yuden Co., Ltd.',
+   0x00037B: u'IDEC IZUMI Corporation',
+   0x00037C: u'Coax Media',
+   0x00037D: u'Stellcom',
+   0x00037E: u'PORTech Communications, Inc.',
+   0x00037F: u'Atheros Communications, Inc.',
+   0x000380: u'SSH Communications Security Corp.',
+   0x000381: u'Ingenico International',
+   0x000382: u'A-One Co., Ltd.',
+   0x000383: u'Metera Networks, Inc.',
+   0x000384: u'AETA',
+   0x000385: u'Actelis Networks, Inc.',
+   0x000386: u'Ho Net, Inc.',
+   0x000387: u'Blaze Network Products',
+   0x000388: u'Fastfame Technology Co., Ltd.',
+   0x000389: u'Plantronics',
+   0x00038A: u'America Online, Inc.',
+   0x00038B: u'PLUS-ONE I&T, Inc.',
+   0x00038C: u'Total Impact',
+   0x00038D: u'PCS Revenue Control Systems, Inc.',
+   0x00038E: u'Atoga Systems, Inc.',
+   0x00038F: u'Weinschel Corporation',
+   0x000390: u'Digital Video Communications, Inc.',
+   0x000391: u'Advanced Digital Broadcast, Ltd.',
+   0x000392: u'Hyundai Teletek Co., Ltd.',
+   0x000393: u'Apple Computer, Inc.',
+   0x000394: u'Connect One',
+   0x000395: u'California Amplifier',
+   0x000396: u'EZ Cast Co., Ltd.',
+   0x000397: u'Watchfront Electronics',
+   0x000398: u'WISI',
+   0x000399: u'Dongju Informations & Communications Co., Ltd.',
+   0x00039A: u'SiConnect',
+   0x00039B: u'NetChip Technology, Inc.',
+   0x00039C: u'OptiMight Communications, Inc.',
+   0x00039D: u'BENQ CORPORATION',
+   0x00039E: u'Tera System Co., Ltd.',
+   0x00039F: u'Cisco Systems, Inc.',
+   0x0003A0: u'Cisco Systems, Inc.',
+   0x0003A1: u'HIPER Information & Communication, Inc.',
+   0x0003A2: u'Catapult Communications',
+   0x0003A3: u'MAVIX, Ltd.',
+   0x0003A4: u'Data Storage and Information Management',
+   0x0003A5: u'Medea Corporation',
+   0x0003A6: u'Traxit Technology, Inc.',
+   0x0003A7: u'Unixtar Technology, Inc.',
+   0x0003A8: u'IDOT Computers, Inc.',
+   0x0003A9: u'AXCENT Media AG',
+   0x0003AA: u'Watlow',
+   0x0003AB: u'Bridge Information Systems',
+   0x0003AC: u'Fronius Schweissmaschinen',
+   0x0003AD: u'Emerson Energy Systems AB',
+   0x0003AE: u'Allied Advanced Manufacturing Pte, Ltd.',
+   0x0003AF: u'Paragea Communications',
+   0x0003B0: u'Xsense Technology Corp.',
+   0x0003B1: u'Hospira Inc.',
+   0x0003B2: u'Radware',
+   0x0003B3: u'IA Link Systems Co., Ltd.',
+   0x0003B4: u'Macrotek International Corp.',
+   0x0003B5: u'Entra Technology Co.',
+   0x0003B6: u'QSI Corporation',
+   0x0003B7: u'ZACCESS Systems',
+   0x0003B8: u'NetKit Solutions, LLC',
+   0x0003B9: u'Hualong Telecom Co., Ltd.',
+   0x0003BA: u'Sun Microsystems',
+   0x0003BB: u'Signal Communications Limited',
+   0x0003BC: u'COT GmbH',
+   0x0003BD: u'OmniCluster Technologies, Inc.',
+   0x0003BE: u'Netility',
+   0x0003BF: u'Centerpoint Broadband Technologies, Inc.',
+   0x0003C0: u'RFTNC Co., Ltd.',
+   0x0003C1: u'Packet Dynamics Ltd',
+   0x0003C2: u'Solphone K.K.',
+   0x0003C3: u'Micronik Multimedia',
+   0x0003C4: u'Tomra Systems ASA',
+   0x0003C5: u'Mobotix AG',
+   0x0003C6: u'ICUE Systems, Inc.',
+   0x0003C7: u'hopf Elektronik GmbH',
+   0x0003C8: u'CML Emergency Services',
+   0x0003C9: u'TECOM Co., Ltd.',
+   0x0003CA: u'MTS Systems Corp.',
+   0x0003CB: u'Nippon Systems Development Co., Ltd.',
+   0x0003CC: u'Momentum Computer, Inc.',
+   0x0003CD: u'Clovertech, Inc.',
+   0x0003CE: u'ETEN Technologies, Inc.',
+   0x0003CF: u'Muxcom, Inc.',
+   0x0003D0: u'KOANKEISO Co., Ltd.',
+   0x0003D1: u'Takaya Corporation',
+   0x0003D2: u'Crossbeam Systems, Inc.',
+   0x0003D3: u'Internet Energy Systems, Inc.',
+   0x0003D4: u'Alloptic, Inc.',
+   0x0003D5: u'Advanced Communications Co., Ltd.',
+   0x0003D6: u'RADVision, Ltd.',
+   0x0003D7: u'NextNet Wireless, Inc.',
+   0x0003D8: u'iMPath Networks, Inc.',
+   0x0003D9: u'Secheron SA',
+   0x0003DA: u'Takamisawa Cybernetics Co., Ltd.',
+   0x0003DB: u'Apogee Electronics Corp.',
+   0x0003DC: u'Lexar Media, Inc.',
+   0x0003DD: u'Comark Corp.',
+   0x0003DE: u'OTC Wireless',
+   0x0003DF: u'Desana Systems',
+   0x0003E0: u'RadioFrame Networks, Inc.',
+   0x0003E1: u'Winmate Communication, Inc.',
+   0x0003E2: u'Comspace Corporation',
+   0x0003E3: u'Cisco Systems, Inc.',
+   0x0003E4: u'Cisco Systems, Inc.',
+   0x0003E5: u'Hermstedt SG',
+   0x0003E6: u'Entone Technologies, Inc.',
+   0x0003E7: u'Logostek Co. Ltd.',
+   0x0003E8: u'Wavelength Digital Limited',
+   0x0003E9: u'Akara Canada, Inc.',
+   0x0003EA: u'Mega System Technologies, Inc.',
+   0x0003EB: u'Atrica',
+   0x0003EC: u'ICG Research, Inc.',
+   0x0003ED: u'Shinkawa Electric Co., Ltd.',
+   0x0003EE: u'MKNet Corporation',
+   0x0003EF: u'Oneline AG',
+   0x0003F0: u'Redfern Broadband Networks',
+   0x0003F1: u'Cicada Semiconductor, Inc.',
+   0x0003F2: u'Seneca Networks',
+   0x0003F3: u'Dazzle Multimedia, Inc.',
+   0x0003F4: u'NetBurner',
+   0x0003F5: u'Chip2Chip',
+   0x0003F6: u'Allegro Networks, Inc.',
+   0x0003F7: u'Plast-Control GmbH',
+   0x0003F8: u'SanCastle Technologies, Inc.',
+   0x0003F9: u'Pleiades Communications, Inc.',
+   0x0003FA: u'TiMetra Networks',
+   0x0003FB: u'Toko Seiki Company, Ltd.',
+   0x0003FC: u'Intertex Data AB',
+   0x0003FD: u'Cisco Systems, Inc.',
+   0x0003FE: u'Cisco Systems, Inc.',
+   0x0003FF: u'Microsoft Corporation',
+   0x000400: u'LEXMARK INTERNATIONAL, INC.',
+   0x000401: u'Osaki Electric Co., Ltd.',
+   0x000402: u'Nexsan Technologies, Ltd.',
+   0x000403: u'Nexsi Corporation',
+   0x000404: u'Makino Milling Machine Co., Ltd.',
+   0x000405: u'ACN Technologies',
+   0x000406: u'Fa. Metabox AG',
+   0x000407: u'Topcon Positioning Systems, Inc.',
+   0x000408: u'Sanko Electronics Co., Ltd.',
+   0x000409: u'Cratos Networks',
+   0x00040A: u'Sage Systems',
+   0x00040B: u'3com Europe Ltd.',
+   0x00040C: u'KANNO Work\'s Ltd.',
+   0x00040D: u'Avaya, Inc.',
+   0x00040E: u'AVM GmbH',
+   0x00040F: u'Asus Network Technologies, Inc.',
+   0x000410: u'Spinnaker Networks, Inc.',
+   0x000411: u'Inkra Networks, Inc.',
+   0x000412: u'WaveSmith Networks, Inc.',
+   0x000413: u'SNOM Technology AG',
+   0x000414: u'Umezawa Musen Denki Co., Ltd.',
+   0x000415: u'Rasteme Systems Co., Ltd.',
+   0x000416: u'Parks S/A Comunicacoes Digitais',
+   0x000417: u'ELAU AG',
+   0x000418: u'Teltronic S.A.U.',
+   0x000419: u'Fibercycle Networks, Inc.',
+   0x00041A: u'ines GmbH',
+   0x00041B: u'Digital Interfaces Ltd.',
+   0x00041C: u'ipDialog, Inc.',
+   0x00041D: u'Corega of America',
+   0x00041E: u'Shikoku Instrumentation Co., Ltd.',
+   0x00041F: u'Sony Computer Entertainment, Inc.',
+   0x000420: u'Slim Devices, Inc.',
+   0x000421: u'Ocular Networks',
+   0x000422: u'Gordon Kapes, Inc.',
+   0x000423: u'Intel Corporation',
+   0x000424: u'TMC s.r.l.',
+   0x000425: u'Atmel Corporation',
+   0x000426: u'Autosys',
+   0x000427: u'Cisco Systems, Inc.',
+   0x000428: u'Cisco Systems, Inc.',
+   0x000429: u'Pixord Corporation',
+   0x00042A: u'Wireless Networks, Inc.',
+   0x00042B: u'IT Access Co., Ltd.',
+   0x00042C: u'Minet, Inc.',
+   0x00042D: u'Sarian Systems, Ltd.',
+   0x00042E: u'Netous Technologies, Ltd.',
+   0x00042F: u'International Communications Products, Inc.',
+   0x000430: u'Netgem',
+   0x000431: u'GlobalStreams, Inc.',
+   0x000432: u'Voyetra Turtle Beach, Inc.',
+   0x000433: u'Cyberboard A/S',
+   0x000434: u'Accelent Systems, Inc.',
+   0x000435: u'Comptek International, Inc.',
+   0x000436: u'ELANsat Technologies, Inc.',
+   0x000437: u'Powin Information Technology, Inc.',
+   0x000438: u'Nortel Networks',
+   0x000439: u'Rosco Entertainment Technology, Inc.',
+   0x00043A: u'Intelligent Telecommunications, Inc.',
+   0x00043B: u'Lava Computer Mfg., Inc.',
+   0x00043C: u'SONOS Co., Ltd.',
+   0x00043D: u'INDEL AG',
+   0x00043E: u'Telencomm',
+   0x00043F: u'Electronic Systems Technology, Inc.',
+   0x000440: u'cyberPIXIE, Inc.',
+   0x000441: u'Half Dome Systems, Inc.',
+   0x000442: u'NACT',
+   0x000443: u'Agilent Technologies, Inc.',
+   0x000444: u'Western Multiplex Corporation',
+   0x000445: u'LMS Skalar Instruments GmbH',
+   0x000446: u'CYZENTECH Co., Ltd.',
+   0x000447: u'Acrowave Systems Co., Ltd.',
+   0x000448: u'Polaroid Professional Imaging',
+   0x000449: u'Mapletree Networks',
+   0x00044A: u'iPolicy Networks, Inc.',
+   0x00044B: u'NVIDIA',
+   0x00044C: u'JENOPTIK',
+   0x00044D: u'Cisco Systems, Inc.',
+   0x00044E: u'Cisco Systems, Inc.',
+   0x00044F: u'Leukhardt Systemelektronik GmbH',
+   0x000450: u'DMD Computers SRL',
+   0x000451: u'Medrad, Inc.',
+   0x000452: u'RocketLogix, Inc.',
+   0x000453: u'YottaYotta, Inc.',
+   0x000454: u'Quadriga UK',
+   0x000455: u'ANTARA.net',
+   0x000456: u'PipingHot Networks',
+   0x000457: u'Universal Access Technology, Inc.',
+   0x000458: u'Fusion X Co., Ltd.',
+   0x000459: u'Veristar Corporation',
+   0x00045A: u'The Linksys Group, Inc.',
+   0x00045B: u'Techsan Electronics Co., Ltd.',
+   0x00045C: u'Mobiwave Pte Ltd',
+   0x00045D: u'BEKA Elektronik',
+   0x00045E: u'PolyTrax Information Technology AG',
+   0x00045F: u'Evalue Technology, Inc.',
+   0x000460: u'Knilink Technology, Inc.',
+   0x000461: u'EPOX Computer Co., Ltd.',
+   0x000462: u'DAKOS Data & Communication Co., Ltd.',
+   0x000463: u'Bosch Security Systems',
+   0x000464: u'Fantasma Networks, Inc.',
+   0x000465: u'i.s.t isdn-support technik GmbH',
+   0x000466: u'ARMITEL Co.',
+   0x000467: u'Wuhan Research Institute of MII',
+   0x000468: u'Vivity, Inc.',
+   0x000469: u'Innocom, Inc.',
+   0x00046A: u'Navini Networks',
+   0x00046B: u'Palm Wireless, Inc.',
+   0x00046C: u'Cyber Technology Co., Ltd.',
+   0x00046D: u'Cisco Systems, Inc.',
+   0x00046E: u'Cisco Systems, Inc.',
+   0x00046F: u'Digitel S/A Industria Eletronica',
+   0x000470: u'ipUnplugged AB',
+   0x000471: u'IPrad',
+   0x000472: u'Telelynx, Inc.',
+   0x000473: u'Photonex Corporation',
+   0x000474: u'LEGRAND',
+   0x000475: u'3 Com Corporation',
+   0x000476: u'3 Com Corporation',
+   0x000477: u'Scalant Systems, Inc.',
+   0x000478: u'G. Star Technology Corporation',
+   0x000479: u'Radius Co., Ltd.',
+   0x00047A: u'AXXESSIT ASA',
+   0x00047B: u'Schlumberger',
+   0x00047C: u'Skidata AG',
+   0x00047D: u'Pelco',
+   0x00047E: u'Optelecom=NKF',
+   0x00047F: u'Chr. Mayr GmbH & Co. KG',
+   0x000480: u'Foundry Networks, Inc.',
+   0x000481: u'Econolite Control Products, Inc.',
+   0x000482: u'Medialogic Corp.',
+   0x000483: u'Deltron Technology, Inc.',
+   0x000484: u'Amann GmbH',
+   0x000485: u'PicoLight',
+   0x000486: u'ITTC, University of Kansas',
+   0x000487: u'Cogency Semiconductor, Inc.',
+   0x000488: u'Eurotherm Controls',
+   0x000489: u'YAFO Networks, Inc.',
+   0x00048A: u'Temia Vertriebs GmbH',
+   0x00048B: u'Poscon Corporation',
+   0x00048C: u'Nayna Networks, Inc.',
+   0x00048D: u'Tone Commander Systems, Inc.',
+   0x00048E: u'Ohm Tech Labs, Inc.',
+   0x00048F: u'TD Systems Corp.',
+   0x000490: u'Optical Access',
+   0x000491: u'Technovision, Inc.',
+   0x000492: u'Hive Internet, Ltd.',
+   0x000493: u'Tsinghua Unisplendour Co., Ltd.',
+   0x000494: u'Breezecom, Ltd.',
+   0x000495: u'Tejas Networks',
+   0x000496: u'Extreme Networks',
+   0x000497: u'MacroSystem Digital Video AG',
+   0x000498: u'Mahi Networks',
+   0x000499: u'Chino Corporation',
+   0x00049A: u'Cisco Systems, Inc.',
+   0x00049B: u'Cisco Systems, Inc.',
+   0x00049C: u'Surgient Networks, Inc.',
+   0x00049D: u'Ipanema Technologies',
+   0x00049E: u'Wirelink Co., Ltd.',
+   0x00049F: u'Freescale Semiconductor',
+   0x0004A0: u'Verity Instruments, Inc.',
+   0x0004A1: u'Pathway Connectivity',
+   0x0004A2: u'L.S.I. Japan Co., Ltd.',
+   0x0004A3: u'Microchip Technology, Inc.',
+   0x0004A4: u'NetEnabled, Inc.',
+   0x0004A5: u'Barco Projection Systems NV',
+   0x0004A6: u'SAF Tehnika Ltd.',
+   0x0004A7: u'FabiaTech Corporation',
+   0x0004A8: u'Broadmax Technologies, Inc.',
+   0x0004A9: u'SandStream Technologies, Inc.',
+   0x0004AA: u'Jetstream Communications',
+   0x0004AB: u'Comverse Network Systems, Inc.',
+   0x0004AC: u'IBM CORP.',
+   0x0004AD: u'Malibu Networks',
+   0x0004AE: u'Liquid Metronics',
+   0x0004AF: u'Digital Fountain, Inc.',
+   0x0004B0: u'ELESIGN Co., Ltd.',
+   0x0004B1: u'Signal Technology, Inc.',
+   0x0004B2: u'ESSEGI SRL',
+   0x0004B3: u'Videotek, Inc.',
+   0x0004B4: u'CIAC',
+   0x0004B5: u'Equitrac Corporation',
+   0x0004B6: u'Stratex Networks, Inc.',
+   0x0004B7: u'AMB i.t. Holding',
+   0x0004B8: u'Kumahira Co., Ltd.',
+   0x0004B9: u'S.I. Soubou, Inc.',
+   0x0004BA: u'KDD Media Will Corporation',
+   0x0004BB: u'Bardac Corporation',
+   0x0004BC: u'Giantec, Inc.',
+   0x0004BD: u'Motorola BCS',
+   0x0004BE: u'OptXCon, Inc.',
+   0x0004BF: u'VersaLogic Corp.',
+   0x0004C0: u'Cisco Systems, Inc.',
+   0x0004C1: u'Cisco Systems, Inc.',
+   0x0004C2: u'Magnipix, Inc.',
+   0x0004C3: u'CASTOR Informatique',
+   0x0004C4: u'Allen & Heath Limited',
+   0x0004C5: u'ASE Technologies, USA',
+   0x0004C6: u'Yamaha Motor Co., Ltd.',
+   0x0004C7: u'NetMount',
+   0x0004C8: u'LIBA Maschinenfabrik GmbH',
+   0x0004C9: u'Micro Electron Co., Ltd.',
+   0x0004CA: u'FreeMs Corp.',
+   0x0004CB: u'Tdsoft Communication, Ltd.',
+   0x0004CC: u'Peek Traffic B.V.',
+   0x0004CD: u'Informedia Research Group',
+   0x0004CE: u'Patria Ailon',
+   0x0004CF: u'Seagate Technology',
+   0x0004D0: u'Softlink s.r.o.',
+   0x0004D1: u'Drew Technologies, Inc.',
+   0x0004D2: u'Adcon Telemetry GmbH',
+   0x0004D3: u'Toyokeiki Co., Ltd.',
+   0x0004D4: u'Proview Electronics Co., Ltd.',
+   0x0004D5: u'Hitachi Communication Systems, Inc.',
+   0x0004D6: u'Takagi Industrial Co., Ltd.',
+   0x0004D7: u'Omitec Instrumentation Ltd.',
+   0x0004D8: u'IPWireless, Inc.',
+   0x0004D9: u'Titan Electronics, Inc.',
+   0x0004DA: u'Relax Technology, Inc.',
+   0x0004DB: u'Tellus Group Corp.',
+   0x0004DC: u'Nortel Networks',
+   0x0004DD: u'Cisco Systems, Inc.',
+   0x0004DE: u'Cisco Systems, Inc.',
+   0x0004DF: u'Teracom Telematica Ltda.',
+   0x0004E0: u'Procket Networks',
+   0x0004E1: u'Infinior Microsystems',
+   0x0004E2: u'SMC Networks, Inc.',
+   0x0004E3: u'Accton Technology Corp.',
+   0x0004E4: u'Daeryung Ind., Inc.',
+   0x0004E5: u'Glonet Systems, Inc.',
+   0x0004E6: u'Banyan Network Private Limited',
+   0x0004E7: u'Lightpointe Communications, Inc',
+   0x0004E8: u'IER, Inc.',
+   0x0004E9: u'Infiniswitch Corporation',
+   0x0004EA: u'Hewlett-Packard Company',
+   0x0004EB: u'Paxonet Communications, Inc.',
+   0x0004EC: u'Memobox SA',
+   0x0004ED: u'Billion Electric Co., Ltd.',
+   0x0004EE: u'Lincoln Electric Company',
+   0x0004EF: u'Polestar Corp.',
+   0x0004F0: u'International Computers, Ltd',
+   0x0004F1: u'WhereNet',
+   0x0004F2: u'Polycom',
+   0x0004F3: u'FS FORTH-SYSTEME GmbH',
+   0x0004F4: u'Infinite Electronics Inc.',
+   0x0004F5: u'SnowShore Networks, Inc.',
+   0x0004F6: u'Amphus',
+   0x0004F7: u'Omega Band, Inc.',
+   0x0004F8: u'QUALICABLE TV Industria E Com., Ltda',
+   0x0004F9: u'Xtera Communications, Inc.',
+   0x0004FA: u'NBS Technologies Inc.',
+   0x0004FB: u'Commtech, Inc.',
+   0x0004FC: u'Stratus Computer (DE), Inc.',
+   0x0004FD: u'Japan Control Engineering Co., Ltd.',
+   0x0004FE: u'Pelago Networks',
+   0x0004FF: u'Acronet Co., Ltd.',
+   0x000500: u'Cisco Systems, Inc.',
+   0x000501: u'Cisco Systems, Inc.',
+   0x000502: u'APPLE COMPUTER',
+   0x000503: u'ICONAG',
+   0x000504: u'Naray Information & Communication Enterprise',
+   0x000505: u'Systems Integration Solutions, Inc.',
+   0x000506: u'Reddo Networks AB',
+   0x000507: u'Fine Appliance Corp.',
+   0x000508: u'Inetcam, Inc.',
+   0x000509: u'AVOC Nishimura Ltd.',
+   0x00050A: u'ICS Spa',
+   0x00050B: u'SICOM Systems, Inc.',
+   0x00050C: u'Network Photonics, Inc.',
+   0x00050D: u'Midstream Technologies, Inc.',
+   0x00050E: u'3ware, Inc.',
+   0x00050F: u'Tanaka S/S Ltd.',
+   0x000510: u'Infinite Shanghai Communication Terminals Ltd.',
+   0x000511: u'Complementary Technologies Ltd',
+   0x000512: u'MeshNetworks, Inc.',
+   0x000513: u'VTLinx Multimedia Systems, Inc.',
+   0x000514: u'KDT Systems Co., Ltd.',
+   0x000515: u'Nuark Co., Ltd.',
+   0x000516: u'SMART Modular Technologies',
+   0x000517: u'Shellcomm, Inc.',
+   0x000518: u'Jupiters Technology',
+   0x000519: u'Siemens Building Technologies AG,',
+   0x00051A: u'3Com Europe Ltd.',
+   0x00051B: u'Magic Control Technology Corporation',
+   0x00051C: u'Xnet Technology Corp.',
+   0x00051D: u'Airocon, Inc.',
+   0x00051E: u'Brocade Communications Systems, Inc.',
+   0x00051F: u'Taijin Media Co., Ltd.',
+   0x000520: u'Smartronix, Inc.',
+   0x000521: u'Control Microsystems',
+   0x000522: u'LEA*D Corporation, Inc.',
+   0x000523: u'AVL List GmbH',
+   0x000524: u'BTL System (HK) Limited',
+   0x000525: u'Puretek Industrial Co., Ltd.',
+   0x000526: u'IPAS GmbH',
+   0x000527: u'SJ Tek Co. Ltd',
+   0x000528: u'New Focus, Inc.',
+   0x000529: u'Shanghai Broadan Communication Technology Co., Ltd',
+   0x00052A: u'Ikegami Tsushinki Co., Ltd.',
+   0x00052B: u'HORIBA, Ltd.',
+   0x00052C: u'Supreme Magic Corporation',
+   0x00052D: u'Zoltrix International Limited',
+   0x00052E: u'Cinta Networks',
+   0x00052F: u'Leviton Voice and Data',
+   0x000530: u'Andiamo Systems, Inc.',
+   0x000531: u'Cisco Systems, Inc.',
+   0x000532: u'Cisco Systems, Inc.',
+   0x000533: u'Sanera Systems, Inc.',
+   0x000534: u'Northstar Engineering Ltd.',
+   0x000535: u'Chip PC Ltd.',
+   0x000536: u'Danam Communications, Inc.',
+   0x000537: u'Nets Technology Co., Ltd.',
+   0x000538: u'Merilus, Inc.',
+   0x000539: u'A Brand New World in Sweden AB',
+   0x00053A: u'Willowglen Services Pte Ltd',
+   0x00053B: u'Harbour Networks Ltd., Co. Beijing',
+   0x00053C: u'Xircom',
+   0x00053D: u'Agere Systems',
+   0x00053E: u'KID Systeme GmbH',
+   0x00053F: u'VisionTek, Inc.',
+   0x000540: u'FAST Corporation',
+   0x000541: u'Advanced Systems Co., Ltd.',
+   0x000542: u'Otari, Inc.',
+   0x000543: u'IQ Wireless GmbH',
+   0x000544: u'Valley Technologies, Inc.',
+   0x000545: u'Internet Photonics',
+   0x000546: u'KDDI Network & Solultions Inc.',
+   0x000547: u'Starent Networks',
+   0x000548: u'Disco Corporation',
+   0x000549: u'Salira Optical Network Systems',
+   0x00054A: u'Ario Data Networks, Inc.',
+   0x00054B: u'Micro Innovation AG',
+   0x00054C: u'RF Innovations Pty Ltd',
+   0x00054D: u'Brans Technologies, Inc.',
+   0x00054E: u'Philips Components',
+   0x00054F: u'PRIVATE',
+   0x000550: u'Vcomms Limited',
+   0x000551: u'F & S Elektronik Systeme GmbH',
+   0x000552: u'Xycotec Computer GmbH',
+   0x000553: u'DVC Company, Inc.',
+   0x000554: u'Rangestar Wireless',
+   0x000555: u'Japan Cash Machine Co., Ltd.',
+   0x000556: u'360 Systems',
+   0x000557: u'Agile TV Corporation',
+   0x000558: u'Synchronous, Inc.',
+   0x000559: u'Intracom S.A.',
+   0x00055A: u'Power Dsine Ltd.',
+   0x00055B: u'Charles Industries, Ltd.',
+   0x00055C: u'Kowa Company, Ltd.',
+   0x00055D: u'D-Link Systems, Inc.',
+   0x00055E: u'Cisco Systems, Inc.',
+   0x00055F: u'Cisco Systems, Inc.',
+   0x000560: u'LEADER COMM.CO., LTD',
+   0x000561: u'nac Image Technology, Inc.',
+   0x000562: u'Digital View Limited',
+   0x000563: u'J-Works, Inc.',
+   0x000564: u'Tsinghua Bitway Co., Ltd.',
+   0x000565: u'Tailyn Communication Company Ltd.',
+   0x000566: u'Secui.com Corporation',
+   0x000567: u'Etymonic Design, Inc.',
+   0x000568: u'Piltofish Networks AB',
+   0x000569: u'VMWARE, Inc.',
+   0x00056A: u'Heuft Systemtechnik GmbH',
+   0x00056B: u'C.P. Technology Co., Ltd.',
+   0x00056C: u'Hung Chang Co., Ltd.',
+   0x00056D: u'Pacific Corporation',
+   0x00056E: u'National Enhance Technology, Inc.',
+   0x00056F: u'Innomedia Technologies Pvt. Ltd.',
+   0x000570: u'Baydel Ltd.',
+   0x000571: u'Seiwa Electronics Co.',
+   0x000572: u'Deonet Co., Ltd.',
+   0x000573: u'Cisco Systems, Inc.',
+   0x000574: u'Cisco Systems, Inc.',
+   0x000575: u'CDS-Electronics BV',
+   0x000576: u'NSM Technology Ltd.',
+   0x000577: u'SM Information & Communication',
+   0x000578: u'PRIVATE',
+   0x000579: u'Universal Control Solution Corp.',
+   0x00057A: u'Hatteras Networks',
+   0x00057B: u'Chung Nam Electronic Co., Ltd.',
+   0x00057C: u'RCO Security AB',
+   0x00057D: u'Sun Communications, Inc.',
+   0x00057E: u'Eckelmann Steuerungstechnik GmbH',
+   0x00057F: u'Acqis Technology',
+   0x000580: u'Fibrolan Ltd.',
+   0x000581: u'Snell & Wilcox Ltd.',
+   0x000582: u'ClearCube Technology',
+   0x000583: u'ImageCom Limited',
+   0x000584: u'AbsoluteValue Systems, Inc.',
+   0x000585: u'Juniper Networks, Inc.',
+   0x000586: u'Lucent Technologies',
+   0x000587: u'Locus, Incorporated',
+   0x000588: u'Sensoria Corp.',
+   0x000589: u'National Datacomputer',
+   0x00058A: u'Netcom Co., Ltd.',
+   0x00058B: u'IPmental, Inc.',
+   0x00058C: u'Opentech Inc.',
+   0x00058D: u'Lynx Photonic Networks, Inc.',
+   0x00058E: u'Flextronics International GmbH & Co. Nfg. KG',
+   0x00058F: u'CLCsoft co.',
+   0x000590: u'Swissvoice Ltd.',
+   0x000591: u'Active Silicon Ltd.',
+   0x000592: u'Pultek Corp.',
+   0x000593: u'Grammar Engine Inc.',
+   0x000594: u'IXXAT Automation GmbH',
+   0x000595: u'Alesis Corporation',
+   0x000596: u'Genotech Co., Ltd.',
+   0x000597: u'Eagle Traffic Control Systems',
+   0x000598: u'CRONOS S.r.l.',
+   0x000599: u'DRS Test and Energy Management or DRS-TEM',
+   0x00059A: u'Cisco Systems, Inc.',
+   0x00059B: u'Cisco Systems, Inc.',
+   0x00059C: u'Kleinknecht GmbH, Ing. Buero',
+   0x00059D: u'Daniel Computing Systems, Inc.',
+   0x00059E: u'Zinwell Corporation',
+   0x00059F: u'Yotta Networks, Inc.',
+   0x0005A0: u'MOBILINE Kft.',
+   0x0005A1: u'Zenocom',
+   0x0005A2: u'CELOX Networks',
+   0x0005A3: u'QEI, Inc.',
+   0x0005A4: u'Lucid Voice Ltd.',
+   0x0005A5: u'KOTT',
+   0x0005A6: u'Extron Electronics',
+   0x0005A7: u'Hyperchip, Inc.',
+   0x0005A8: u'WYLE ELECTRONICS',
+   0x0005A9: u'Princeton Networks, Inc.',
+   0x0005AA: u'Moore Industries International Inc.',
+   0x0005AB: u'Cyber Fone, Inc.',
+   0x0005AC: u'Northern Digital, Inc.',
+   0x0005AD: u'Topspin Communications, Inc.',
+   0x0005AE: u'Mediaport USA',
+   0x0005AF: u'InnoScan Computing A/S',
+   0x0005B0: u'Korea Computer Technology Co., Ltd.',
+   0x0005B1: u'ASB Technology BV',
+   0x0005B2: u'Medison Co., Ltd.',
+   0x0005B3: u'Asahi-Engineering Co., Ltd.',
+   0x0005B4: u'Aceex Corporation',
+   0x0005B5: u'Broadcom Technologies',
+   0x0005B6: u'INSYS Microelectronics GmbH',
+   0x0005B7: u'Arbor Technology Corp.',
+   0x0005B8: u'Electronic Design Associates, Inc.',
+   0x0005B9: u'Airvana, Inc.',
+   0x0005BA: u'Area Netwoeks, Inc.',
+   0x0005BB: u'Myspace AB',
+   0x0005BC: u'Resorsys Ltd.',
+   0x0005BD: u'ROAX BV',
+   0x0005BE: u'Kongsberg Seatex AS',
+   0x0005BF: u'JustEzy Technology, Inc.',
+   0x0005C0: u'Digital Network Alacarte Co., Ltd.',
+   0x0005C1: u'A-Kyung Motion, Inc.',
+   0x0005C2: u'Soronti, Inc.',
+   0x0005C3: u'Pacific Instruments, Inc.',
+   0x0005C4: u'Telect, Inc.',
+   0x0005C5: u'Flaga HF',
+   0x0005C6: u'Triz Communications',
+   0x0005C7: u'I/F-COM A/S',
+   0x0005C8: u'VERYTECH',
+   0x0005C9: u'LG Innotek',
+   0x0005CA: u'Hitron Technology, Inc.',
+   0x0005CB: u'ROIS Technologies, Inc.',
+   0x0005CC: u'Sumtel Communications, Inc.',
+   0x0005CD: u'Denon, Ltd.',
+   0x0005CE: u'Prolink Microsystems Corporation',
+   0x0005CF: u'Thunder River Technologies, Inc.',
+   0x0005D0: u'Solinet Systems',
+   0x0005D1: u'Metavector Technologies',
+   0x0005D2: u'DAP Technologies',
+   0x0005D3: u'eProduction Solutions, Inc.',
+   0x0005D4: u'FutureSmart Networks, Inc.',
+   0x0005D5: u'Speedcom Wireless',
+   0x0005D6: u'Titan Wireless',
+   0x0005D7: u'Vista Imaging, Inc.',
+   0x0005D8: u'Arescom, Inc.',
+   0x0005D9: u'Techno Valley, Inc.',
+   0x0005DA: u'Apex Automationstechnik',
+   0x0005DB: u'Nentec GmbH',
+   0x0005DC: u'Cisco Systems, Inc.',
+   0x0005DD: u'Cisco Systems, Inc.',
+   0x0005DE: u'Gi Fone Korea, Inc.',
+   0x0005DF: u'Electronic Innovation, Inc.',
+   0x0005E0: u'Empirix Corp.',
+   0x0005E1: u'Trellis Photonics, Ltd.',
+   0x0005E2: u'Creativ Network Technologies',
+   0x0005E3: u'LightSand Communications, Inc.',
+   0x0005E4: u'Red Lion Controls L.P.',
+   0x0005E5: u'Renishaw PLC',
+   0x0005E6: u'Egenera, Inc.',
+   0x0005E7: u'Netrake Corp.',
+   0x0005E8: u'TurboWave, Inc.',
+   0x0005E9: u'Unicess Network, Inc.',
+   0x0005EA: u'Rednix',
+   0x0005EB: u'Blue Ridge Networks, Inc.',
+   0x0005EC: u'Mosaic Systems Inc.',
+   0x0005ED: u'Technikum Joanneum GmbH',
+   0x0005EE: u'BEWATOR Group',
+   0x0005EF: u'ADOIR Digital Technology',
+   0x0005F0: u'SATEC',
+   0x0005F1: u'Vrcom, Inc.',
+   0x0005F2: u'Power R, Inc.',
+   0x0005F3: u'Weboyn',
+   0x0005F4: u'System Base Co., Ltd.',
+   0x0005F5: u'OYO Geospace Corp.',
+   0x0005F6: u'Young Chang Co. Ltd.',
+   0x0005F7: u'Analog Devices, Inc.',
+   0x0005F8: u'Real Time Access, Inc.',
+   0x0005F9: u'TOA Corporation',
+   0x0005FA: u'IPOptical, Inc.',
+   0x0005FB: u'ShareGate, Inc.',
+   0x0005FC: u'Schenck Pegasus Corp.',
+   0x0005FD: u'PacketLight Networks Ltd.',
+   0x0005FE: u'Traficon N.V.',
+   0x0005FF: u'SNS Solutions, Inc.',
+   0x000600: u'Toshiba Teli Corporation',
+   0x000601: u'Otanikeiki Co., Ltd.',
+   0x000602: u'Cirkitech Electronics Co.',
+   0x000603: u'Baker Hughes Inc.',
+   0x000604: u'@Track Communications, Inc.',
+   0x000605: u'Inncom International, Inc.',
+   0x000606: u'RapidWAN, Inc.',
+   0x000607: u'Omni Directional Control Technology Inc.',
+   0x000608: u'At-Sky SAS',
+   0x000609: u'Crossport Systems',
+   0x00060A: u'Blue2space',
+   0x00060B: u'Paceline Systems Corporation',
+   0x00060C: u'Melco Industries, Inc.',
+   0x00060D: u'Wave7 Optics',
+   0x00060E: u'IGYS Systems, Inc.',
+   0x00060F: u'Narad Networks Inc',
+   0x000610: u'Abeona Networks Inc',
+   0x000611: u'Zeus Wireless, Inc.',
+   0x000612: u'Accusys, Inc.',
+   0x000613: u'Kawasaki Microelectronics Incorporated',
+   0x000614: u'Prism Holdings',
+   0x000615: u'Kimoto Electric Co., Ltd.',
+   0x000616: u'Tel Net Co., Ltd.',
+   0x000617: u'Redswitch Inc.',
+   0x000618: u'DigiPower Manufacturing Inc.',
+   0x000619: u'Connection Technology Systems',
+   0x00061A: u'Zetari Inc.',
+   0x00061B: u'Portable Systems, IBM Japan Co, Ltd',
+   0x00061C: u'Hoshino Metal Industries, Ltd.',
+   0x00061D: u'MIP Telecom, Inc.',
+   0x00061E: u'Maxan Systems',
+   0x00061F: u'Vision Components GmbH',
+   0x000620: u'Serial System Ltd.',
+   0x000621: u'Hinox, Co., Ltd.',
+   0x000622: u'Chung Fu Chen Yeh Enterprise Corp.',
+   0x000623: u'MGE UPS Systems France',
+   0x000624: u'Gentner Communications Corp.',
+   0x000625: u'The Linksys Group, Inc.',
+   0x000626: u'MWE GmbH',
+   0x000627: u'Uniwide Technologies, Inc.',
+   0x000628: u'Cisco Systems, Inc.',
+   0x000629: u'IBM CORPORATION',
+   0x00062A: u'Cisco Systems, Inc.',
+   0x00062B: u'INTRASERVER TECHNOLOGY',
+   0x00062C: u'Network Robots, Inc.',
+   0x00062D: u'TouchStar Technologies, L.L.C.',
+   0x00062E: u'Aristos Logic Corp.',
+   0x00062F: u'Pivotech Systems Inc.',
+   0x000630: u'Adtranz Sweden',
+   0x000631: u'Optical Solutions, Inc.',
+   0x000632: u'Mesco Engineering GmbH',
+   0x000633: u'Smiths Heimann Biometric Systems',
+   0x000634: u'GTE Airfone Inc.',
+   0x000635: u'PacketAir Networks, Inc.',
+   0x000636: u'Jedai Broadband Networks',
+   0x000637: u'Toptrend-Meta Information (ShenZhen) Inc.',
+   0x000638: u'Sungjin C&C Co., Ltd.',
+   0x000639: u'Newtec',
+   0x00063A: u'Dura Micro, Inc.',
+   0x00063B: u'Arcturus Networks, Inc.',
+   0x00063C: u'NMI Electronics Ltd',
+   0x00063D: u'Microwave Data Systems Inc.',
+   0x00063E: u'Opthos Inc.',
+   0x00063F: u'Everex Communications Inc.',
+   0x000640: u'White Rock Networks',
+   0x000641: u'ITCN',
+   0x000642: u'Genetel Systems Inc.',
+   0x000643: u'SONO Computer Co., Ltd.',
+   0x000644: u'NEIX Inc.',
+   0x000645: u'Meisei Electric Co. Ltd.',
+   0x000646: u'ShenZhen XunBao Network Technology Co Ltd',
+   0x000647: u'Etrali S.A.',
+   0x000648: u'Seedsware, Inc.',
+   0x000649: u'Quante',
+   0x00064A: u'Honeywell Co., Ltd. (KOREA)',
+   0x00064B: u'Alexon Co., Ltd.',
+   0x00064C: u'Invicta Networks, Inc.',
+   0x00064D: u'Sencore',
+   0x00064E: u'Broad Net Technology Inc.',
+   0x00064F: u'PRO-NETS Technology Corporation',
+   0x000650: u'Tiburon Networks, Inc.',
+   0x000651: u'Aspen Networks Inc.',
+   0x000652: u'Cisco Systems, Inc.',
+   0x000653: u'Cisco Systems, Inc.',
+   0x000654: u'Maxxio Technologies',
+   0x000655: u'Yipee, Inc.',
+   0x000656: u'Tactel AB',
+   0x000657: u'Market Central, Inc.',
+   0x000658: u'Helmut Fischer GmbH & Co. KG',
+   0x000659: u'EAL (Apeldoorn) B.V.',
+   0x00065A: u'Strix Systems',
+   0x00065B: u'Dell Computer Corp.',
+   0x00065C: u'Malachite Technologies, Inc.',
+   0x00065D: u'Heidelberg Web Systems',
+   0x00065E: u'Photuris, Inc.',
+   0x00065F: u'ECI Telecom - NGTS Ltd.',
+   0x000660: u'NADEX Co., Ltd.',
+   0x000661: u'NIA Home Technologies Corp.',
+   0x000662: u'MBM Technology Ltd.',
+   0x000663: u'Human Technology Co., Ltd.',
+   0x000664: u'Fostex Corporation',
+   0x000665: u'Sunny Giken, Inc.',
+   0x000666: u'Roving Networks',
+   0x000667: u'Tripp Lite',
+   0x000668: u'Vicon Industries Inc.',
+   0x000669: u'Datasound Laboratories Ltd',
+   0x00066A: u'InfiniCon Systems, Inc.',
+   0x00066B: u'Sysmex Corporation',
+   0x00066C: u'Robinson Corporation',
+   0x00066D: u'Compuprint S.P.A.',
+   0x00066E: u'Delta Electronics, Inc.',
+   0x00066F: u'Korea Data Systems',
+   0x000670: u'Upponetti Oy',
+   0x000671: u'Softing AG',
+   0x000672: u'Netezza',
+   0x000673: u'Optelecom-nkf',
+   0x000674: u'Spectrum Control, Inc.',
+   0x000675: u'Banderacom, Inc.',
+   0x000676: u'Novra Technologies Inc.',
+   0x000677: u'SICK AG',
+   0x000678: u'Marantz Japan, Inc.',
+   0x000679: u'Konami Corporation',
+   0x00067A: u'JMP Systems',
+   0x00067B: u'Toplink C&C Corporation',
+   0x00067C: u'CISCO SYSTEMS, INC.',
+   0x00067D: u'Takasago Ltd.',
+   0x00067E: u'WinCom Systems, Inc.',
+   0x00067F: u'Rearden Steel Technologies',
+   0x000680: u'Card Access, Inc.',
+   0x000681: u'Goepel Electronic GmbH',
+   0x000682: u'Convedia',
+   0x000683: u'Bravara Communications, Inc.',
+   0x000684: u'Biacore AB',
+   0x000685: u'NetNearU Corporation',
+   0x000686: u'ZARDCOM Co., Ltd.',
+   0x000687: u'Omnitron Systems Technology, Inc.',
+   0x000688: u'Telways Communication Co., Ltd.',
+   0x000689: u'yLez Technologies Pte Ltd',
+   0x00068A: u'NeuronNet Co. Ltd. R&D Center',
+   0x00068B: u'AirRunner Technologies, Inc.',
+   0x00068C: u'3Com Corporation',
+   0x00068D: u'SEPATON, Inc.',
+   0x00068E: u'HID Corporation',
+   0x00068F: u'Telemonitor, Inc.',
+   0x000690: u'Euracom Communication GmbH',
+   0x000691: u'PT Inovacao',
+   0x000692: u'Intruvert Networks, Inc.',
+   0x000693: u'Flexus Computer Technology, Inc.',
+   0x000694: u'Mobillian Corporation',
+   0x000695: u'Ensure Technologies, Inc.',
+   0x000696: u'Advent Networks',
+   0x000697: u'R & D Center',
+   0x000698: u'egnite Software GmbH',
+   0x000699: u'Vida Design Co.',
+   0x00069A: u'e & Tel',
+   0x00069B: u'AVT Audio Video Technologies GmbH',
+   0x00069C: u'Transmode Systems AB',
+   0x00069D: u'Petards Mobile Intelligence',
+   0x00069E: u'UNIQA, Inc.',
+   0x00069F: u'Kuokoa Networks',
+   0x0006A0: u'Mx Imaging',
+   0x0006A1: u'Celsian Technologies, Inc.',
+   0x0006A2: u'Microtune, Inc.',
+   0x0006A3: u'Bitran Corporation',
+   0x0006A4: u'INNOWELL Corp.',
+   0x0006A5: u'PINON Corp.',
+   0x0006A6: u'Artistic Licence (UK) Ltd',
+   0x0006A7: u'Primarion',
+   0x0006A8: u'KC Technology, Inc.',
+   0x0006A9: u'Universal Instruments Corp.',
+   0x0006AA: u'Miltope Corporation',
+   0x0006AB: u'W-Link Systems, Inc.',
+   0x0006AC: u'Intersoft Co.',
+   0x0006AD: u'KB Electronics Ltd.',
+   0x0006AE: u'Himachal Futuristic Communications Ltd',
+   0x0006AF: u'PRIVATE',
+   0x0006B0: u'Comtech EF Data Corp.',
+   0x0006B1: u'Sonicwall',
+   0x0006B2: u'Linxtek Co.',
+   0x0006B3: u'Diagraph Corporation',
+   0x0006B4: u'Vorne Industries, Inc.',
+   0x0006B5: u'Luminent, Inc.',
+   0x0006B6: u'Nir-Or Israel Ltd.',
+   0x0006B7: u'TELEM GmbH',
+   0x0006B8: u'Bandspeed Pty Ltd',
+   0x0006B9: u'A5TEK Corp.',
+   0x0006BA: u'Westwave Communications',
+   0x0006BB: u'ATI Technologies Inc.',
+   0x0006BC: u'Macrolink, Inc.',
+   0x0006BD: u'BNTECHNOLOGY Co., Ltd.',
+   0x0006BE: u'Baumer Optronic GmbH',
+   0x0006BF: u'Accella Technologies Co., Ltd.',
+   0x0006C0: u'United Internetworks, Inc.',
+   0x0006C1: u'CISCO SYSTEMS, INC.',
+   0x0006C2: u'Smartmatic Corporation',
+   0x0006C3: u'Schindler Elevators Ltd.',
+   0x0006C4: u'Piolink Inc.',
+   0x0006C5: u'INNOVI Technologies Limited',
+   0x0006C6: u'lesswire AG',
+   0x0006C7: u'RFNET Technologies Pte Ltd (S)',
+   0x0006C8: u'Sumitomo Metal Micro Devices, Inc.',
+   0x0006C9: u'Technical Marketing Research, Inc.',
+   0x0006CA: u'American Computer & Digital Components, Inc. (ACDC)',
+   0x0006CB: u'Jotron Electronics A/S',
+   0x0006CC: u'JMI Electronics Co., Ltd.',
+   0x0006CD: u'Kodak IL Ltd.',
+   0x0006CE: u'DATENO',
+   0x0006CF: u'Thales Avionics In-Flight Systems, LLC',
+   0x0006D0: u'Elgar Electronics Corp.',
+   0x0006D1: u'Tahoe Networks, Inc.',
+   0x0006D2: u'Tundra Semiconductor Corp.',
+   0x0006D3: u'Alpha Telecom, Inc. U.S.A.',
+   0x0006D4: u'Interactive Objects, Inc.',
+   0x0006D5: u'Diamond Systems Corp.',
+   0x0006D6: u'Cisco Systems, Inc.',
+   0x0006D7: u'Cisco Systems, Inc.',
+   0x0006D8: u'Maple Optical Systems',
+   0x0006D9: u'IPM-Net S.p.A.',
+   0x0006DA: u'ITRAN Communications Ltd.',
+   0x0006DB: u'ICHIPS Co., Ltd.',
+   0x0006DC: u'Syabas Technology (Amquest)',
+   0x0006DD: u'AT & T Laboratories - Cambridge Ltd',
+   0x0006DE: u'Flash Technology',
+   0x0006DF: u'AIDONIC Corporation',
+   0x0006E0: u'MAT Co., Ltd.',
+   0x0006E1: u'Techno Trade s.a',
+   0x0006E2: u'Ceemax Technology Co., Ltd.',
+   0x0006E3: u'Quantitative Imaging Corporation',
+   0x0006E4: u'Citel Technologies Ltd.',
+   0x0006E5: u'Fujian Newland Computer Ltd. Co.',
+   0x0006E6: u'DongYang Telecom Co., Ltd.',
+   0x0006E7: u'Bit Blitz Communications Inc.',
+   0x0006E8: u'Optical Network Testing, Inc.',
+   0x0006E9: u'Intime Corp.',
+   0x0006EA: u'ELZET80 Mikrocomputer GmbH&Co. KG',
+   0x0006EB: u'Global Data',
+   0x0006EC: u'M/A COM Private Radio System Inc.',
+   0x0006ED: u'Inara Networks',
+   0x0006EE: u'Shenyang Neu-era Information & Technology Stock Co., Ltd',
+   0x0006EF: u'Maxxan Systems, Inc.',
+   0x0006F0: u'Digeo, Inc.',
+   0x0006F1: u'Optillion',
+   0x0006F2: u'Platys Communications',
+   0x0006F3: u'AcceLight Networks',
+   0x0006F4: u'Prime Electronics & Satellitics Inc.',
+   0x0006F8: u'CPU Technology, Inc.',
+   0x0006F9: u'Mitsui Zosen Systems Research Inc.',
+   0x0006FA: u'IP SQUARE Co, Ltd.',
+   0x0006FB: u'Hitachi Printing Solutions, Ltd.',
+   0x0006FC: u'Fnet Co., Ltd.',
+   0x0006FD: u'Comjet Information Systems Corp.',
+   0x0006FE: u'Celion Networks, Inc.',
+   0x0006FF: u'Sheba Systems Co., Ltd.',
+   0x000700: u'Zettamedia Korea',
+   0x000701: u'RACAL-DATACOM',
+   0x000702: u'Varian Medical Systems',
+   0x000703: u'CSEE Transport',
+   0x000705: u'Endress & Hauser GmbH & Co',
+   0x000706: u'Sanritz Corporation',
+   0x000707: u'Interalia Inc.',
+   0x000708: u'Bitrage Inc.',
+   0x000709: u'Westerstrand Urfabrik AB',
+   0x00070A: u'Unicom Automation Co., Ltd.',
+   0x00070B: u'Octal, SA',
+   0x00070C: u'SVA-Intrusion.com Co. Ltd.',
+   0x00070D: u'Cisco Systems Inc.',
+   0x00070E: u'Cisco Systems Inc.',
+   0x00070F: u'Fujant, Inc.',
+   0x000710: u'Adax, Inc.',
+   0x000711: u'Acterna',
+   0x000712: u'JAL Information Technology',
+   0x000713: u'IP One, Inc.',
+   0x000714: u'Brightcom',
+   0x000715: u'General Research of Electronics, Inc.',
+   0x000716: u'J & S Marine Ltd.',
+   0x000717: u'Wieland Electric GmbH',
+   0x000718: u'iCanTek Co., Ltd.',
+   0x000719: u'Mobiis Co., Ltd.',
+   0x00071A: u'Finedigital Inc.',
+   0x00071B: u'Position Technology Inc.',
+   0x00071C: u'AT&T Fixed Wireless Services',
+   0x00071D: u'Satelsa Sistemas Y Aplicaciones De Telecomunicaciones, S.A.',
+   0x00071E: u'Tri-M Engineering / Nupak Dev. Corp.',
+   0x00071F: u'European Systems Integration',
+   0x000720: u'Trutzschler GmbH & Co. KG',
+   0x000721: u'Formac Elektronik GmbH',
+   0x000722: u'Nielsen Media Research',
+   0x000723: u'ELCON Systemtechnik GmbH',
+   0x000724: u'Telemax Co., Ltd.',
+   0x000725: u'Bematech International Corp.',
+   0x000727: u'Zi Corporation (HK) Ltd.',
+   0x000728: u'Neo Telecom',
+   0x000729: u'Kistler Instrumente AG',
+   0x00072A: u'Innovance Networks',
+   0x00072B: u'Jung Myung Telecom Co., Ltd.',
+   0x00072C: u'Fabricom',
+   0x00072D: u'CNSystems',
+   0x00072E: u'North Node AB',
+   0x00072F: u'Intransa, Inc.',
+   0x000730: u'Hutchison OPTEL Telecom Technology Co., Ltd.',
+   0x000731: u'Spiricon, Inc.',
+   0x000732: u'AAEON Technology Inc.',
+   0x000733: u'DANCONTROL Engineering',
+   0x000734: u'ONStor, Inc.',
+   0x000735: u'Flarion Technologies, Inc.',
+   0x000736: u'Data Video Technologies Co., Ltd.',
+   0x000737: u'Soriya Co. Ltd.',
+   0x000738: u'Young Technology Co., Ltd.',
+   0x000739: u'Motion Media Technology Ltd.',
+   0x00073A: u'Inventel Systemes',
+   0x00073B: u'Tenovis GmbH & Co KG',
+   0x00073C: u'Telecom Design',
+   0x00073D: u'Nanjing Postel Telecommunications Co., Ltd.',
+   0x00073E: u'China Great-Wall Computer Shenzhen Co., Ltd.',
+   0x00073F: u'Woojyun Systec Co., Ltd.',
+   0x000740: u'Melco Inc.',
+   0x000741: u'Sierra Automated Systems',
+   0x000742: u'Current Technologies',
+   0x000743: u'Chelsio Communications',
+   0x000744: u'Unico, Inc.',
+   0x000745: u'Radlan Computer Communications Ltd.',
+   0x000746: u'TURCK, Inc.',
+   0x000747: u'Mecalc',
+   0x000748: u'The Imaging Source Europe',
+   0x000749: u'CENiX Inc.',
+   0x00074A: u'Carl Valentin GmbH',
+   0x00074B: u'Daihen Corporation',
+   0x00074C: u'Beicom Inc.',
+   0x00074D: u'Zebra Technologies Corp.',
+   0x00074E: u'Naughty boy co., Ltd.',
+   0x00074F: u'Cisco Systems, Inc.',
+   0x000750: u'Cisco Systems, Inc.',
+   0x000751: u'm.u.t. - GmbH',
+   0x000752: u'Rhythm Watch Co., Ltd.',
+   0x000753: u'Beijing Qxcomm Technology Co., Ltd.',
+   0x000754: u'Xyterra Computing, Inc.',
+   0x000755: u'Lafon SA',
+   0x000756: u'Juyoung Telecom',
+   0x000757: u'Topcall International AG',
+   0x000758: u'Dragonwave',
+   0x000759: u'Boris Manufacturing Corp.',
+   0x00075A: u'Air Products and Chemicals, Inc.',
+   0x00075B: u'Gibson Guitars',
+   0x00075C: u'Eastman Kodak Company',
+   0x00075D: u'Celleritas Inc.',
+   0x00075E: u'Ametek Power Instruments',
+   0x00075F: u'VCS Video Communication Systems AG',
+   0x000760: u'TOMIS Information & Telecom Corp.',
+   0x000761: u'Logitech SA',
+   0x000762: u'Group Sense Limited',
+   0x000763: u'Sunniwell Cyber Tech. Co., Ltd.',
+   0x000764: u'YoungWoo Telecom Co. Ltd.',
+   0x000765: u'Jade Quantum Technologies, Inc.',
+   0x000766: u'Chou Chin Industrial Co., Ltd.',
+   0x000767: u'Yuxing Electronics Company Limited',
+   0x000768: u'Danfoss A/S',
+   0x000769: u'Italiana Macchi SpA',
+   0x00076A: u'NEXTEYE Co., Ltd.',
+   0x00076B: u'Stralfors AB',
+   0x00076C: u'Daehanet, Inc.',
+   0x00076D: u'Flexlight Networks',
+   0x00076E: u'Sinetica Corporation Limited',
+   0x00076F: u'Synoptics Limited',
+   0x000770: u'Locusnetworks Corporation',
+   0x000771: u'Embedded System Corporation',
+   0x000772: u'Alcatel Shanghai Bell Co., Ltd.',
+   0x000773: u'Ascom Powerline Communications Ltd.',
+   0x000774: u'GuangZhou Thinker Technology Co. Ltd.',
+   0x000775: u'Valence Semiconductor, Inc.',
+   0x000776: u'Federal APD',
+   0x000777: u'Motah Ltd.',
+   0x000778: u'GERSTEL GmbH & Co. KG',
+   0x000779: u'Sungil Telecom Co., Ltd.',
+   0x00077A: u'Infoware System Co., Ltd.',
+   0x00077B: u'Millimetrix Broadband Networks',
+   0x00077C: u'OnTime Networks',
+   0x00077E: u'Elrest GmbH',
+   0x00077F: u'J Communications Co., Ltd.',
+   0x000780: u'Bluegiga Technologies OY',
+   0x000781: u'Itron Inc.',
+   0x000782: u'Nauticus Networks, Inc.',
+   0x000783: u'SynCom Network, Inc.',
+   0x000784: u'Cisco Systems Inc.',
+   0x000785: u'Cisco Systems Inc.',
+   0x000786: u'Wireless Networks Inc.',
+   0x000787: u'Idea System Co., Ltd.',
+   0x000788: u'Clipcomm, Inc.',
+   0x000789: u'Eastel Systems Corporation',
+   0x00078A: u'Mentor Data System Inc.',
+   0x00078B: u'Wegener Communications, Inc.',
+   0x00078C: u'Elektronikspecialisten i Borlange AB',
+   0x00078D: u'NetEngines Ltd.',
+   0x00078E: u'Garz & Friche GmbH',
+   0x00078F: u'Emkay Innovative Products',
+   0x000790: u'Tri-M Technologies (s) Limited',
+   0x000791: u'International Data Communications, Inc.',
+   0x000792: u'Suetron Electronic GmbH',
+   0x000793: u'Shin Satellite Public Company Limited',
+   0x000794: u'Simple Devices, Inc.',
+   0x000795: u'Elitegroup Computer System Co. (ECS)',
+   0x000796: u'LSI Systems, Inc.',
+   0x000797: u'Netpower Co., Ltd.',
+   0x000798: u'Selea SRL',
+   0x000799: u'Tipping Point Technologies, Inc.',
+   0x00079A: u'SmartSight Networks Inc.',
+   0x00079B: u'Aurora Networks',
+   0x00079C: u'Golden Electronics Technology Co., Ltd.',
+   0x00079D: u'Musashi Co., Ltd.',
+   0x00079E: u'Ilinx Co., Ltd.',
+   0x00079F: u'Action Digital Inc.',
+   0x0007A0: u'e-Watch Inc.',
+   0x0007A1: u'VIASYS Healthcare GmbH',
+   0x0007A2: u'Opteon Corporation',
+   0x0007A3: u'Ositis Software, Inc.',
+   0x0007A4: u'GN Netcom Ltd.',
+   0x0007A5: u'Y.D.K Co. Ltd.',
+   0x0007A6: u'Home Automation, Inc.',
+   0x0007A7: u'A-Z Inc.',
+   0x0007A8: u'Haier Group Technologies Ltd.',
+   0x0007A9: u'Novasonics',
+   0x0007AA: u'Quantum Data Inc.',
+   0x0007AC: u'Eolring',
+   0x0007AD: u'Pentacon GmbH Foto-und Feinwerktechnik',
+   0x0007AE: u'Britestream Networks, Inc.',
+   0x0007AF: u'N-Tron Corp.',
+   0x0007B0: u'Office Details, Inc.',
+   0x0007B1: u'Equator Technologies',
+   0x0007B2: u'Transaccess S.A.',
+   0x0007B3: u'Cisco Systems Inc.',
+   0x0007B4: u'Cisco Systems Inc.',
+   0x0007B5: u'Any One Wireless Ltd.',
+   0x0007B6: u'Telecom Technology Ltd.',
+   0x0007B7: u'Samurai Ind. Prods Eletronicos Ltda',
+   0x0007B8: u'American Predator Corp.',
+   0x0007B9: u'Ginganet Corporation',
+   0x0007BA: u'UTStarcom, Inc.',
+   0x0007BB: u'Candera Inc.',
+   0x0007BC: u'Identix Inc.',
+   0x0007BD: u'Radionet Ltd.',
+   0x0007BE: u'DataLogic SpA',
+   0x0007BF: u'Armillaire Technologies, Inc.',
+   0x0007C0: u'NetZerver Inc.',
+   0x0007C1: u'Overture Networks, Inc.',
+   0x0007C2: u'Netsys Telecom',
+   0x0007C3: u'Cirpack',
+   0x0007C4: u'JEAN Co. Ltd.',
+   0x0007C5: u'Gcom, Inc.',
+   0x0007C6: u'VDS Vosskuhler GmbH',
+   0x0007C7: u'Synectics Systems Limited',
+   0x0007C8: u'Brain21, Inc.',
+   0x0007C9: u'Technol Seven Co., Ltd.',
+   0x0007CA: u'Creatix Polymedia Ges Fur Kommunikaitonssysteme',
+   0x0007CB: u'Freebox SA',
+   0x0007CC: u'Kaba Benzing GmbH',
+   0x0007CD: u'NMTEL Co., Ltd.',
+   0x0007CE: u'Cabletime Limited',
+   0x0007CF: u'Anoto AB',
+   0x0007D0: u'Automat Engenharia de Automaoa Ltda.',
+   0x0007D1: u'Spectrum Signal Processing Inc.',
+   0x0007D2: u'Logopak Systeme',
+   0x0007D3: u'Stork Digital Imaging B.V.',
+   0x0007D4: u'Zhejiang Yutong Network Communication Co Ltd.',
+   0x0007D5: u'3e Technologies Int;., Inc.',
+   0x0007D6: u'Commil Ltd.',
+   0x0007D7: u'Caporis Networks AG',
+   0x0007D8: u'Hitron Systems Inc.',
+   0x0007D9: u'Splicecom',
+   0x0007DA: u'Neuro Telecom Co., Ltd.',
+   0x0007DB: u'Kirana Networks, Inc.',
+   0x0007DC: u'Atek Co, Ltd.',
+   0x0007DD: u'Cradle Technologies',
+   0x0007DE: u'eCopilt AB',
+   0x0007DF: u'Vbrick Systems Inc.',
+   0x0007E0: u'Palm Inc.',
+   0x0007E1: u'WIS Communications Co. Ltd.',
+   0x0007E2: u'Bitworks, Inc.',
+   0x0007E3: u'Navcom Technology, Inc.',
+   0x0007E4: u'SoftRadio Co., Ltd.',
+   0x0007E5: u'Coup Corporation',
+   0x0007E6: u'edgeflow Canada Inc.',
+   0x0007E7: u'FreeWave Technologies',
+   0x0007E8: u'St. Bernard Software',
+   0x0007E9: u'Intel Corporation',
+   0x0007EA: u'Massana, Inc.',
+   0x0007EB: u'Cisco Systems Inc.',
+   0x0007EC: u'Cisco Systems Inc.',
+   0x0007ED: u'Altera Corporation',
+   0x0007EE: u'telco Informationssysteme GmbH',
+   0x0007EF: u'Lockheed Martin Tactical Systems',
+   0x0007F0: u'LogiSync Corporation',
+   0x0007F1: u'TeraBurst Networks Inc.',
+   0x0007F2: u'IOA Corporation',
+   0x0007F3: u'Thinkengine Networks',
+   0x0007F4: u'Eletex Co., Ltd.',
+   0x0007F5: u'Bridgeco Co AG',
+   0x0007F6: u'Qqest Software Systems',
+   0x0007F7: u'Galtronics',
+   0x0007F8: u'ITDevices, Inc.',
+   0x0007F9: u'Phonetics, Inc.',
+   0x0007FA: u'ITT Co., Ltd.',
+   0x0007FB: u'Giga Stream UMTS Technologies GmbH',
+   0x0007FC: u'Adept Systems Inc.',
+   0x0007FD: u'LANergy Ltd.',
+   0x0007FE: u'Rigaku Corporation',
+   0x0007FF: u'Gluon Networks',
+   0x000800: u'MULTITECH SYSTEMS, INC.',
+   0x000801: u'HighSpeed Surfing Inc.',
+   0x000802: u'Compaq Computer Corporation',
+   0x000803: u'Cos Tron',
+   0x000804: u'ICA Inc.',
+   0x000805: u'Techno-Holon Corporation',
+   0x000806: u'Raonet Systems, Inc.',
+   0x000807: u'Access Devices Limited',
+   0x000808: u'PPT Vision, Inc.',
+   0x000809: u'Systemonic AG',
+   0x00080A: u'Espera-Werke GmbH',
+   0x00080B: u'Birka BPA Informationssystem AB',
+   0x00080C: u'VDA elettronica SrL',
+   0x00080D: u'Toshiba',
+   0x00080E: u'Motorola, BCS',
+   0x00080F: u'Proximion Fiber Optics AB',
+   0x000810: u'Key Technology, Inc.',
+   0x000811: u'VOIX Corporation',
+   0x000812: u'GM-2 Corporation',
+   0x000813: u'Diskbank, Inc.',
+   0x000814: u'TIL Technologies',
+   0x000815: u'CATS Co., Ltd.',
+   0x000816: u'Bluetags A/S',
+   0x000817: u'EmergeCore Networks LLC',
+   0x000818: u'Pixelworks, Inc.',
+   0x000819: u'Banksys',
+   0x00081A: u'Sanrad Intelligence Storage Communications (2000) Ltd.',
+   0x00081B: u'Windigo Systems',
+   0x00081C: u'@pos.com',
+   0x00081D: u'Ipsil, Incorporated',
+   0x00081E: u'Repeatit AB',
+   0x00081F: u'Pou Yuen Tech Corp. Ltd.',
+   0x000820: u'Cisco Systems Inc.',
+   0x000821: u'Cisco Systems Inc.',
+   0x000822: u'InPro Comm',
+   0x000823: u'Texa Corp.',
+   0x000824: u'Promatek Industries Ltd.',
+   0x000825: u'Acme Packet',
+   0x000826: u'Colorado Med Tech',
+   0x000827: u'Pirelli Broadband Solutions',
+   0x000828: u'Koei Engineering Ltd.',
+   0x000829: u'Aval Nagasaki Corporation',
+   0x00082A: u'Powerwallz Network Security',
+   0x00082B: u'Wooksung Electronics, Inc.',
+   0x00082C: u'Homag AG',
+   0x00082D: u'Indus Teqsite Private Limited',
+   0x00082E: u'Multitone Electronics PLC',
+   0x00084E: u'DivergeNet, Inc.',
+   0x00084F: u'Qualstar Corporation',
+   0x000850: u'Arizona Instrument Corp.',
+   0x000851: u'Canadian Bank Note Company, Ltd.',
+   0x000852: u'Davolink Co. Inc.',
+   0x000853: u'Schleicher GmbH & Co. Relaiswerke KG',
+   0x000854: u'Netronix, Inc.',
+   0x000855: u'NASA-Goddard Space Flight Center',
+   0x000856: u'Gamatronic Electronic Industries Ltd.',
+   0x000857: u'Polaris Networks, Inc.',
+   0x000858: u'Novatechnology Inc.',
+   0x000859: u'ShenZhen Unitone Electronics Co., Ltd.',
+   0x00085A: u'IntiGate Inc.',
+   0x00085B: u'Hanbit Electronics Co., Ltd.',
+   0x00085C: u'Shanghai Dare Technologies Co. Ltd.',
+   0x00085D: u'Aastra',
+   0x00085E: u'PCO AG',
+   0x00085F: u'Picanol N.V.',
+   0x000860: u'LodgeNet Entertainment Corp.',
+   0x000861: u'SoftEnergy Co., Ltd.',
+   0x000862: u'NEC Eluminant Technologies, Inc.',
+   0x000863: u'Entrisphere Inc.',
+   0x000864: u'Fasy S.p.A.',
+   0x000865: u'JASCOM CO., LTD',
+   0x000866: u'DSX Access Systems, Inc.',
+   0x000867: u'Uptime Devices',
+   0x000868: u'PurOptix',
+   0x000869: u'Command-e Technology Co.,Ltd.',
+   0x00086A: u'Industrie Technik IPS GmbH',
+   0x00086B: u'MIPSYS',
+   0x00086C: u'Plasmon LMS',
+   0x00086D: u'Missouri FreeNet',
+   0x00086E: u'Hyglo AB',
+   0x00086F: u'Resources Computer Network Ltd.',
+   0x000870: u'Rasvia Systems, Inc.',
+   0x000871: u'NORTHDATA Co., Ltd.',
+   0x000872: u'Sorenson Technologies, Inc.',
+   0x000873: u'DAP Design B.V.',
+   0x000874: u'Dell Computer Corp.',
+   0x000875: u'Acorp Electronics Corp.',
+   0x000876: u'SDSystem',
+   0x000877: u'Liebert HIROSS S.p.A.',
+   0x000878: u'Benchmark Storage Innovations',
+   0x000879: u'CEM Corporation',
+   0x00087A: u'Wipotec GmbH',
+   0x00087B: u'RTX Telecom A/S',
+   0x00087C: u'Cisco Systems, Inc.',
+   0x00087D: u'Cisco Systems Inc.',
+   0x00087E: u'Bon Electro-Telecom Inc.',
+   0x00087F: u'SPAUN electronic GmbH & Co. KG',
+   0x000880: u'BroadTel Canada Communications inc.',
+   0x000881: u'DIGITAL HANDS CO.,LTD.',
+   0x000882: u'SIGMA CORPORATION',
+   0x000883: u'Hewlett-Packard Company',
+   0x000884: u'Index Braille AB',
+   0x000885: u'EMS Dr. Thomas Wuensche',
+   0x000886: u'Hansung Teliann, Inc.',
+   0x000887: u'Maschinenfabrik Reinhausen GmbH',
+   0x000888: u'OULLIM Information Technology Inc,.',
+   0x000889: u'Echostar Technologies Corp',
+   0x00088A: u'Minds@Work',
+   0x00088B: u'Tropic Networks Inc.',
+   0x00088C: u'Quanta Network Systems Inc.',
+   0x00088D: u'Sigma-Links Inc.',
+   0x00088E: u'Nihon Computer Co., Ltd.',
+   0x00088F: u'ADVANCED DIGITAL TECHNOLOGY',
+   0x000890: u'AVILINKS SA',
+   0x000891: u'Lyan Inc.',
+   0x000892: u'EM Solutions',
+   0x000893: u'LE INFORMATION COMMUNICATION INC.',
+   0x000894: u'InnoVISION Multimedia Ltd.',
+   0x000895: u'DIRC Technologie GmbH & Co.KG',
+   0x000896: u'Printronix, Inc.',
+   0x000897: u'Quake Technologies',
+   0x000898: u'Gigabit Optics Corporation',
+   0x000899: u'Netbind, Inc.',
+   0x00089A: u'Alcatel Microelectronics',
+   0x00089B: u'ICP Electronics Inc.',
+   0x00089C: u'Elecs Industry Co., Ltd.',
+   0x00089D: u'UHD-Elektronik',
+   0x00089E: u'Beijing Enter-Net co.LTD',
+   0x00089F: u'EFM Networks',
+   0x0008A0: u'Stotz Feinmesstechnik GmbH',
+   0x0008A1: u'CNet Technology Inc.',
+   0x0008A2: u'ADI Engineering, Inc.',
+   0x0008A3: u'Cisco Systems',
+   0x0008A4: u'Cisco Systems',
+   0x0008A5: u'Peninsula Systems Inc.',
+   0x0008A6: u'Multiware & Image Co., Ltd.',
+   0x0008A7: u'iLogic Inc.',
+   0x0008A8: u'Systec Co., Ltd.',
+   0x0008A9: u'SangSang Technology, Inc.',
+   0x0008AA: u'KARAM',
+   0x0008AB: u'EnerLinx.com, Inc.',
+   0x0008AC: u'PRIVATE',
+   0x0008AD: u'Toyo-Linx Co., Ltd.',
+   0x0008AE: u'PacketFront Sweden AB',
+   0x0008AF: u'Novatec Corporation',
+   0x0008B0: u'BKtel communications GmbH',
+   0x0008B1: u'ProQuent Systems',
+   0x0008B2: u'SHENZHEN COMPASS TECHNOLOGY DEVELOPMENT CO.,LTD',
+   0x0008B3: u'Fastwel',
+   0x0008B4: u'SYSPOL',
+   0x0008B5: u'TAI GUEN ENTERPRISE CO., LTD',
+   0x0008B6: u'RouteFree, Inc.',
+   0x0008B7: u'HIT Incorporated',
+   0x0008B8: u'E.F. Johnson',
+   0x0008B9: u'KAON MEDIA Co., Ltd.',
+   0x0008BA: u'Erskine Systems Ltd',
+   0x0008BB: u'NetExcell',
+   0x0008BC: u'Ilevo AB',
+   0x0008BD: u'TEPG-US',
+   0x0008BE: u'XENPAK MSA Group',
+   0x0008BF: u'Aptus Elektronik AB',
+   0x0008C0: u'ASA SYSTEMS',
+   0x0008C1: u'Avistar Communications Corporation',
+   0x0008C2: u'Cisco Systems',
+   0x0008C3: u'Contex A/S',
+   0x0008C4: u'Hikari Co.,Ltd.',
+   0x0008C5: u'Liontech Co., Ltd.',
+   0x0008C6: u'Philips Consumer Communications',
+   0x0008C7: u'COMPAQ COMPUTER CORPORATION',
+   0x0008C8: u'Soneticom, Inc.',
+   0x0008C9: u'TechniSat Digital GmbH',
+   0x0008CA: u'TwinHan Technology Co.,Ltd',
+   0x0008CB: u'Zeta Broadband Inc.',
+   0x0008CC: u'Remotec, Inc.',
+   0x0008CD: u'With-Net Inc',
+   0x0008CE: u'IPMobileNet Inc.',
+   0x0008CF: u'Nippon Koei Power Systems Co., Ltd.',
+   0x0008D0: u'Musashi Engineering Co., LTD.',
+   0x0008D1: u'KAREL INC.',
+   0x0008D2: u'ZOOM Networks Inc.',
+   0x0008D3: u'Hercules Technologies S.A.',
+   0x0008D4: u'IneoQuest Technologies, Inc',
+   0x0008D5: u'Vanguard Managed Solutions',
+   0x0008D6: u'HASSNET Inc.',
+   0x0008D7: u'HOW CORPORATION',
+   0x0008D8: u'Dowkey Microwave',
+   0x0008D9: u'Mitadenshi Co.,LTD',
+   0x0008DA: u'SofaWare Technologies Ltd.',
+   0x0008DB: u'Corrigent Systems',
+   0x0008DC: u'Wiznet',
+   0x0008DD: u'Telena Communications, Inc.',
+   0x0008DE: u'3UP Systems',
+   0x0008DF: u'Alistel Inc.',
+   0x0008E0: u'ATO Technology Ltd.',
+   0x0008E1: u'Barix AG',
+   0x0008E2: u'Cisco Systems',
+   0x0008E3: u'Cisco Systems',
+   0x0008E4: u'Envenergy Inc',
+   0x0008E5: u'IDK Corporation',
+   0x0008E6: u'Littlefeet',
+   0x0008E7: u'SHI ControlSystems,Ltd.',
+   0x0008E8: u'Excel Master Ltd.',
+   0x0008E9: u'NextGig',
+   0x0008EA: u'Motion Control Engineering, Inc',
+   0x0008EB: u'ROMWin Co.,Ltd.',
+   0x0008EC: u'Zonu, Inc.',
+   0x0008ED: u'ST&T Instrument Corp.',
+   0x0008EE: u'Logic Product Development',
+   0x0008EF: u'DIBAL,S.A.',
+   0x0008F0: u'Next Generation Systems, Inc.',
+   0x0008F1: u'Voltaire',
+   0x0008F2: u'C&S Technology',
+   0x0008F3: u'WANY',
+   0x0008F4: u'Bluetake Technology Co., Ltd.',
+   0x0008F5: u'YESTECHNOLOGY Co.,Ltd.',
+   0x0008F6: u'SUMITOMO ELECTRIC HIGHTECHS.co.,ltd.',
+   0x0008F7: u'Hitachi Ltd, Semiconductor &amp; Integrated Circuits Gr',
+   0x0008F8: u'Guardall Ltd',
+   0x0008F9: u'Padcom, Inc.',
+   0x0008FA: u'Karl E.Brinkmann GmbH',
+   0x0008FB: u'SonoSite, Inc.',
+   0x0008FC: u'Gigaphoton Inc.',
+   0x0008FD: u'BlueKorea Co., Ltd.',
+   0x0008FE: u'UNIK C&C Co.,Ltd.',
+   0x0008FF: u'Trilogy Communications Ltd',
+   0x000900: u'TMT',
+   0x000901: u'Shenzhen Shixuntong Information & Technoligy Co',
+   0x000902: u'Redline Communications Inc.',
+   0x000903: u'Panasas, Inc',
+   0x000904: u'MONDIAL electronic',
+   0x000905: u'iTEC Technologies Ltd.',
+   0x000906: u'Esteem Networks',
+   0x000907: u'Chrysalis Development',
+   0x000908: u'VTech Technology Corp.',
+   0x000909: u'Telenor Connect A/S',
+   0x00090A: u'SnedFar Technology Co., Ltd.',
+   0x00090B: u'MTL  Instruments PLC',
+   0x00090C: u'Mayekawa Mfg. Co. Ltd.',
+   0x00090D: u'LEADER ELECTRONICS CORP.',
+   0x00090E: u'Helix Technology Inc.',
+   0x00090F: u'Fortinet Inc.',
+   0x000910: u'Simple Access Inc.',
+   0x000911: u'Cisco Systems',
+   0x000912: u'Cisco Systems',
+   0x000913: u'SystemK Corporation',
+   0x000914: u'COMPUTROLS INC.',
+   0x000915: u'CAS Corp.',
+   0x000916: u'Listman Home Technologies, Inc.',
+   0x000917: u'WEM Technology Inc',
+   0x000918: u'SAMSUNG TECHWIN CO.,LTD',
+   0x000919: u'MDS Gateways',
+   0x00091A: u'Macat Optics & Electronics Co., Ltd.',
+   0x00091B: u'Digital Generation Inc.',
+   0x00091C: u'CacheVision, Inc',
+   0x00091D: u'Proteam Computer Corporation',
+   0x00091E: u'Firstech Technology Corp.',
+   0x00091F: u'A&amp;D Co., Ltd.',
+   0x000920: u'EpoX COMPUTER CO.,LTD.',
+   0x000921: u'Planmeca Oy',
+   0x000922: u'Touchless Sensor Technology AG',
+   0x000923: u'Heaman System Co., Ltd',
+   0x000924: u'Telebau GmbH',
+   0x000925: u'VSN Systemen BV',
+   0x000926: u'YODA COMMUNICATIONS, INC.',
+   0x000927: u'TOYOKEIKI CO.,LTD.',
+   0x000928: u'Telecore Inc',
+   0x000929: u'Sanyo Industries (UK) Limited',
+   0x00092A: u'MYTECS Co.,Ltd.',
+   0x00092B: u'iQstor Networks, Inc.',
+   0x00092C: u'Hitpoint Inc.',
+   0x00092D: u'High Tech Computer, Corp.',
+   0x00092E: u'B&Tech System Inc.',
+   0x00092F: u'Akom Technology Corporation',
+   0x000930: u'AeroConcierge Inc.',
+   0x000931: u'Future Internet, Inc.',
+   0x000932: u'Omnilux',
+   0x000933: u'OPTOVALLEY Co. Ltd.',
+   0x000934: u'Dream-Multimedia-Tv GmbH',
+   0x000935: u'Sandvine Incorporated',
+   0x000936: u'Ipetronik GmbH & Co.KG',
+   0x000937: u'Inventec Appliance Corp',
+   0x000938: u'Allot Communications',
+   0x000939: u'ShibaSoku Co.,Ltd.',
+   0x00093A: u'Molex Fiber Optics',
+   0x00093B: u'HYUNDAI NETWORKS INC.',
+   0x00093C: u'Jacques Technologies P/L',
+   0x00093D: u'Newisys,Inc.',
+   0x00093E: u'C&I Technologies',
+   0x00093F: u'Double-Win Enterpirse CO., LTD',
+   0x000940: u'AGFEO GmbH & Co. KG',
+   0x000941: u'Allied Telesis K.K.',
+   0x000942: u'CRESCO, LTD.',
+   0x000943: u'Cisco Systems',
+   0x000944: u'Cisco Systems',
+   0x000945: u'Palmmicro Communications Inc',
+   0x000946: u'Cluster Labs GmbH',
+   0x000947: u'Aztek, Inc.',
+   0x000948: u'Vista Control Systems, Corp.',
+   0x000949: u'Glyph Technologies Inc.',
+   0x00094A: u'Homenet Communications',
+   0x00094B: u'FillFactory NV',
+   0x00094C: u'Communication Weaver Co.,Ltd.',
+   0x00094D: u'Braintree Communications Pty Ltd',
+   0x00094E: u'BARTECH SYSTEMS INTERNATIONAL, INC',
+   0x00094F: u'elmegt GmbH & Co. KG',
+   0x000950: u'Independent Storage Corporation',
+   0x000951: u'Apogee Instruments, Inc',
+   0x000952: u'Auerswald GmbH & Co. KG',
+   0x000953: u'Linkage System Integration Co.Ltd.',
+   0x000954: u'AMiT spol. s. r. o.',
+   0x000955: u'Young Generation International Corp.',
+   0x000956: u'Network Systems Group, Ltd. (NSG)',
+   0x000957: u'Supercaller, Inc.',
+   0x000958: u'INTELNET S.A.',
+   0x000959: u'Sitecsoft',
+   0x00095A: u'RACEWOOD TECHNOLOGY',
+   0x00095B: u'Netgear, Inc.',
+   0x00095C: u'Philips Medical Systems - Cardiac and Monitoring Systems (CM',
+   0x00095D: u'Dialogue Technology Corp.',
+   0x00095E: u'Masstech Group Inc.',
+   0x00095F: u'Telebyte, Inc.',
+   0x000960: u'YOZAN Inc.',
+   0x000961: u'Switchgear and Instrumentation Ltd',
+   0x000962: u'Filetrac AS',
+   0x000963: u'Dominion Lasercom Inc.',
+   0x000964: u'Hi-Techniques',
+   0x000965: u'PRIVATE',
+   0x000966: u'Thales Navigation',
+   0x000967: u'Tachyon, Inc',
+   0x000968: u'TECHNOVENTURE, INC.',
+   0x000969: u'Meret Optical Communications',
+   0x00096A: u'Cloverleaf Communications Inc.',
+   0x00096B: u'IBM Corporation',
+   0x00096C: u'Imedia Semiconductor Corp.',
+   0x00096D: u'Powernet Technologies Corp.',
+   0x00096E: u'GIANT ELECTRONICS LTD.',
+   0x00096F: u'Beijing Zhongqing Elegant Tech. Corp.,Limited',
+   0x000970: u'Vibration Research Corporation',
+   0x000971: u'Time Management, Inc.',
+   0x000972: u'Securebase,Inc',
+   0x000973: u'Lenten Technology Co., Ltd.',
+   0x000974: u'Innopia Technologies, Inc.',
+   0x000975: u'fSONA Communications Corporation',
+   0x000976: u'Datasoft ISDN Systems GmbH',
+   0x000977: u'Brunner Elektronik AG',
+   0x000978: u'AIJI System Co., Ltd.',
+   0x000979: u'Advanced Television Systems Committee, Inc.',
+   0x00097A: u'Louis Design Labs.',
+   0x00097B: u'Cisco Systems',
+   0x00097C: u'Cisco Systems',
+   0x00097D: u'SecWell Networks Oy',
+   0x00097E: u'IMI TECHNOLOGY CO., LTD',
+   0x00097F: u'Vsecure 2000 LTD.',
+   0x000980: u'Power Zenith Inc.',
+   0x000981: u'Newport Networks',
+   0x000982: u'Loewe Opta GmbH',
+   0x000983: u'Gvision Incorporated',
+   0x000984: u'MyCasa Network Inc.',
+   0x000985: u'Auto Telecom Company',
+   0x000986: u'Metalink LTD.',
+   0x000987: u'NISHI NIPPON ELECTRIC WIRE & CABLE CO.,LTD.',
+   0x000988: u'Nudian Electron Co., Ltd.',
+   0x000989: u'VividLogic Inc.',
+   0x00098A: u'EqualLogic Inc',
+   0x00098B: u'Entropic Communications, Inc.',
+   0x00098C: u'Option Wireless Sweden',
+   0x00098D: u'Velocity Semiconductor',
+   0x00098E: u'ipcas GmbH',
+   0x00098F: u'Cetacean Networks',
+   0x000990: u'ACKSYS Communications & systems',
+   0x000991: u'GE Fanuc Automation Manufacturing, Inc.',
+   0x000992: u'InterEpoch Technology,INC.',
+   0x000993: u'Visteon Corporation',
+   0x000994: u'Cronyx Engineering',
+   0x000995: u'Castle Technology Ltd',
+   0x000996: u'RDI',
+   0x000997: u'Nortel Networks',
+   0x000998: u'Capinfo Company Limited',
+   0x000999: u'CP GEORGES RENAULT',
+   0x00099A: u'ELMO COMPANY, LIMITED',
+   0x00099B: u'Western Telematic Inc.',
+   0x00099C: u'Naval Research Laboratory',
+   0x00099D: u'Haliplex Communications',
+   0x00099E: u'Testech, Inc.',
+   0x00099F: u'VIDEX INC.',
+   0x0009A0: u'Microtechno Corporation',
+   0x0009A1: u'Telewise Communications, Inc.',
+   0x0009A2: u'Interface Co., Ltd.',
+   0x0009A3: u'Leadfly Techologies Corp. Ltd.',
+   0x0009A4: u'HARTEC Corporation',
+   0x0009A5: u'HANSUNG ELETRONIC INDUSTRIES DEVELOPMENT CO., LTD',
+   0x0009A6: u'Ignis Optics, Inc.',
+   0x0009A7: u'Bang & Olufsen A/S',
+   0x0009A8: u'Eastmode Pte Ltd',
+   0x0009A9: u'Ikanos Communications',
+   0x0009AA: u'Data Comm for Business, Inc.',
+   0x0009AB: u'Netcontrol Oy',
+   0x0009AC: u'LANVOICE',
+   0x0009AD: u'HYUNDAI SYSCOMM, INC.',
+   0x0009AE: u'OKANO ELECTRIC CO.,LTD',
+   0x0009AF: u'e-generis',
+   0x0009B0: u'Onkyo Corporation',
+   0x0009B1: u'Kanematsu Electronics, Ltd.',
+   0x0009B2: u'L&F Inc.',
+   0x0009B3: u'MCM Systems Ltd',
+   0x0009B4: u'KISAN TELECOM CO., LTD.',
+   0x0009B5: u'3J Tech. Co., Ltd.',
+   0x0009B6: u'Cisco Systems',
+   0x0009B7: u'Cisco Systems',
+   0x0009B8: u'Entise Systems',
+   0x0009B9: u'Action Imaging Solutions',
+   0x0009BA: u'MAKU Informationstechik GmbH',
+   0x0009BB: u'MathStar, Inc.',
+   0x0009BC: u'Integrian, Inc.',
+   0x0009BD: u'Epygi Technologies, Ltd.',
+   0x0009BE: u'Mamiya-OP Co.,Ltd.',
+   0x0009BF: u'Nintendo Co.,Ltd.',
+   0x0009C0: u'6WIND',
+   0x0009C1: u'PROCES-DATA A/S',
+   0x0009C2: u'PRIVATE',
+   0x0009C3: u'NETAS',
+   0x0009C4: u'Medicore Co., Ltd',
+   0x0009C5: u'KINGENE Technology Corporation',
+   0x0009C6: u'Visionics Corporation',
+   0x0009C7: u'Movistec',
+   0x0009C8: u'SINAGAWA TSUSHIN KEISOU SERVICE',
+   0x0009C9: u'BlueWINC Co., Ltd.',
+   0x0009CA: u'iMaxNetworks(Shenzhen)Limited.',
+   0x0009CB: u'HBrain',
+   0x0009CC: u'Moog GmbH',
+   0x0009CD: u'HUDSON SOFT CO.,LTD.',
+   0x0009CE: u'SpaceBridge Semiconductor Corp.',
+   0x0009CF: u'iAd GmbH',
+   0x0009D0: u'Versatel Networks',
+   0x0009D1: u'SERANOA NETWORKS INC',
+   0x0009D2: u'Mai Logic Inc.',
+   0x0009D3: u'Western DataCom Co., Inc.',
+   0x0009D4: u'Transtech Networks',
+   0x0009D5: u'Signal Communication, Inc.',
+   0x0009D6: u'KNC One GmbH',
+   0x0009D7: u'DC Security Products',
+   0x0009D8: u'PRIVATE',
+   0x0009D9: u'Neoscale Systems, Inc',
+   0x0009DA: u'Control Module Inc.',
+   0x0009DB: u'eSpace',
+   0x0009DC: u'Galaxis Technology AG',
+   0x0009DD: u'Mavin Technology Inc.',
+   0x0009DE: u'Samjin Information & Communications Co., Ltd.',
+   0x0009DF: u'Vestel Komunikasyon Sanayi ve Ticaret A.S.',
+   0x0009E0: u'XEMICS S.A.',
+   0x0009E1: u'Gemtek Technology Co., Ltd.',
+   0x0009E2: u'Sinbon Electronics Co., Ltd.',
+   0x0009E3: u'Angel Iglesias S.A.',
+   0x0009E4: u'K Tech Infosystem Inc.',
+   0x0009E5: u'Hottinger Baldwin Messtechnik GmbH',
+   0x0009E6: u'Cyber Switching Inc.',
+   0x0009E7: u'ADC Techonology',
+   0x0009E8: u'Cisco Systems',
+   0x0009E9: u'Cisco Systems',
+   0x0009EA: u'YEM Inc.',
+   0x0009EB: u'HuMANDATA LTD.',
+   0x0009EC: u'Daktronics, Inc.',
+   0x0009ED: u'CipherOptics',
+   0x0009EE: u'MEIKYO ELECTRIC CO.,LTD',
+   0x0009EF: u'Vocera Communications',
+   0x0009F0: u'Shimizu Technology Inc.',
+   0x0009F1: u'Yamaki Electric Corporation',
+   0x0009F2: u'Cohu, Inc., Electronics Division',
+   0x0009F3: u'WELL Communication Corp.',
+   0x0009F4: u'Alcon Laboratories, Inc.',
+   0x0009F5: u'Emerson Network Power Co.,Ltd',
+   0x0009F6: u'Shenzhen Eastern Digital Tech Ltd.',
+   0x0009F7: u'SED, a division of Calian',
+   0x0009F8: u'UNIMO TECHNOLOGY CO., LTD.',
+   0x0009F9: u'ART JAPAN CO., LTD.',
+   0x0009FB: u'Philips Medizinsysteme Boeblingen GmbH',
+   0x0009FC: u'IPFLEX Inc.',
+   0x0009FD: u'Ubinetics Limited',
+   0x0009FE: u'Daisy Technologies, Inc.',
+   0x0009FF: u'X.net 2000 GmbH',
+   0x000A00: u'Mediatek Corp.',
+   0x000A01: u'SOHOware, Inc.',
+   0x000A02: u'ANNSO CO., LTD.',
+   0x000A03: u'ENDESA SERVICIOS, S.L.',
+   0x000A04: u'3Com Europe Ltd',
+   0x000A05: u'Widax Corp.',
+   0x000A06: u'Teledex LLC',
+   0x000A07: u'WebWayOne Ltd',
+   0x000A08: u'ALPINE ELECTRONICS, INC.',
+   0x000A09: u'TaraCom Integrated Products, Inc.',
+   0x000A0A: u'SUNIX Co., Ltd.',
+   0x000A0B: u'Sealevel Systems, Inc.',
+   0x000A0C: u'Scientific Research Corporation',
+   0x000A0D: u'MergeOptics GmbH',
+   0x000A0E: u'Invivo Research Inc.',
+   0x000A0F: u'Ilryung Telesys, Inc',
+   0x000A10: u'FAST media integrations AG',
+   0x000A11: u'ExPet Technologies, Inc',
+   0x000A12: u'Azylex Technology, Inc',
+   0x000A13: u'Silent Witness',
+   0x000A14: u'TECO a.s.',
+   0x000A15: u'Silicon Data, Inc',
+   0x000A16: u'Lassen Research',
+   0x000A17: u'NESTAR COMMUNICATIONS, INC',
+   0x000A18: u'Vichel Inc.',
+   0x000A19: u'Valere Power, Inc.',
+   0x000A1A: u'Imerge Ltd',
+   0x000A1B: u'Stream Labs',
+   0x000A1C: u'Bridge Information Co., Ltd.',
+   0x000A1D: u'Optical Communications Products Inc.',
+   0x000A1E: u'Red-M Products Limited',
+   0x000A1F: u'ART WARE Telecommunication Co., Ltd.',
+   0x000A20: u'SVA Networks, Inc.',
+   0x000A21: u'Integra Telecom Co. Ltd',
+   0x000A22: u'Amperion Inc',
+   0x000A23: u'Parama Networks Inc',
+   0x000A24: u'Octave Communications',
+   0x000A25: u'CERAGON NETWORKS',
+   0x000A26: u'CEIA S.p.A.',
+   0x000A27: u'Apple Computer, Inc.',
+   0x000A28: u'Motorola',
+   0x000A29: u'Pan Dacom Networking AG',
+   0x000A2A: u'QSI Systems Inc.',
+   0x000A2B: u'Etherstuff',
+   0x000A2C: u'Active Tchnology Corporation',
+   0x000A2D: u'PRIVATE',
+   0x000A2E: u'MAPLE NETWORKS CO., LTD',
+   0x000A2F: u'Artnix Inc.',
+   0x000A30: u'Johnson Controls-ASG',
+   0x000A31: u'HCV Wireless',
+   0x000A32: u'Xsido Corporation',
+   0x000A33: u'Emulex Corporation',
+   0x000A34: u'Identicard Systems Incorporated',
+   0x000A35: u'Xilinx',
+   0x000A36: u'Synelec Telecom Multimedia',
+   0x000A37: u'Procera Networks, Inc.',
+   0x000A38: u'Netlock Technologies, Inc.',
+   0x000A39: u'LoPA Information Technology',
+   0x000A3A: u'J-THREE INTERNATIONAL Holding Co., Ltd.',
+   0x000A3B: u'GCT Semiconductor, Inc',
+   0x000A3C: u'Enerpoint Ltd.',
+   0x000A3D: u'Elo Sistemas Eletronicos S.A.',
+   0x000A3E: u'EADS Telecom',
+   0x000A3F: u'Data East Corporation',
+   0x000A40: u'Crown Audio',
+   0x000A41: u'Cisco Systems',
+   0x000A42: u'Cisco Systems',
+   0x000A43: u'Chunghwa Telecom Co., Ltd.',
+   0x000A44: u'Avery Dennison Deutschland GmbH',
+   0x000A45: u'Audio-Technica Corp.',
+   0x000A46: u'ARO Controls SAS',
+   0x000A47: u'Allied Vision Technologies',
+   0x000A48: u'Albatron Technology',
+   0x000A49: u'Acopia Networks',
+   0x000A4A: u'Targa Systems Ltd.',
+   0x000A4B: u'DataPower Technology, Inc.',
+   0x000A4C: u'Molecular Devices Corporation',
+   0x000A4D: u'Noritz Corporation',
+   0x000A4E: u'UNITEK Electronics INC.',
+   0x000A4F: u'Brain Boxes Limited',
+   0x000A50: u'REMOTEK CORPORATION',
+   0x000A51: u'GyroSignal Technology Co., Ltd.',
+   0x000A52: u'AsiaRF Ltd.',
+   0x000A53: u'Intronics, Incorporated',
+   0x000A54: u'Laguna Hills, Inc.',
+   0x000A55: u'MARKEM Corporation',
+   0x000A56: u'HITACHI Maxell Ltd.',
+   0x000A57: u'Hewlett-Packard Company - Standards',
+   0x000A58: u'Ingenieur-Buero Freyer & Siegel',
+   0x000A59: u'HW server',
+   0x000A5A: u'GreenNET Technologies Co.,Ltd.',
+   0x000A5B: u'Power-One as',
+   0x000A5C: u'Carel s.p.a.',
+   0x000A5D: u'PUC Founder (MSC) Berhad',
+   0x000A5E: u'3COM Corporation',
+   0x000A5F: u'almedio inc.',
+   0x000A60: u'Autostar Technology Pte Ltd',
+   0x000A61: u'Cellinx Systems Inc.',
+   0x000A62: u'Crinis Networks, Inc.',
+   0x000A63: u'DHD GmbH',
+   0x000A64: u'Eracom Technologies',
+   0x000A65: u'GentechMedia.co.,ltd.',
+   0x000A66: u'MITSUBISHI ELECTRIC SYSTEM & SERVICE CO.,LTD.',
+   0x000A67: u'OngCorp',
+   0x000A68: u'SolarFlare Communications, Inc.',
+   0x000A69: u'SUNNY bell Technology Co., Ltd.',
+   0x000A6A: u'SVM Microwaves s.r.o.',
+   0x000A6B: u'Tadiran Telecom Business Systems LTD',
+   0x000A6C: u'Walchem Corporation',
+   0x000A6D: u'EKS Elektronikservice GmbH',
+   0x000A6E: u'Broadcast Technology Limited',
+   0x000A6F: u'ZyFLEX Technologies Inc',
+   0x000A70: u'MPLS Forum',
+   0x000A71: u'Avrio Technologies, Inc',
+   0x000A72: u'SimpleTech, Inc.',
+   0x000A73: u'Scientific Atlanta',
+   0x000A74: u'Manticom Networks Inc.',
+   0x000A75: u'Cat Electronics',
+   0x000A76: u'Beida Jade Bird Huaguang Technology Co.,Ltd',
+   0x000A77: u'Bluewire Technologies LLC',
+   0x000A78: u'OLITEC',
+   0x000A79: u'corega K.K.',
+   0x000A7A: u'Kyoritsu Electric Co., Ltd.',
+   0x000A7B: u'Cornelius Consult',
+   0x000A7C: u'Tecton Ltd',
+   0x000A7D: u'Valo, Inc.',
+   0x000A7E: u'The Advantage Group',
+   0x000A7F: u'Teradon Industries, Inc',
+   0x000A80: u'Telkonet Inc.',
+   0x000A81: u'TEIMA Audiotex S.L.',
+   0x000A82: u'TATSUTA SYSTEM ELECTRONICS CO.,LTD.',
+   0x000A83: u'SALTO SYSTEMS S.L.',
+   0x000A84: u'Rainsun Enterprise Co., Ltd.',
+   0x000A85: u'PLAT\'C2,Inc',
+   0x000A86: u'Lenze',
+   0x000A87: u'Integrated Micromachines Inc.',
+   0x000A88: u'InCypher S.A.',
+   0x000A89: u'Creval Systems, Inc.',
+   0x000A8A: u'Cisco Systems',
+   0x000A8B: u'Cisco Systems',
+   0x000A8C: u'Guardware Systems Ltd.',
+   0x000A8D: u'EUROTHERM LIMITED',
+   0x000A8E: u'Invacom Ltd',
+   0x000A8F: u'Aska International Inc.',
+   0x000A90: u'Bayside Interactive, Inc.',
+   0x000A91: u'HemoCue AB',
+   0x000A92: u'Presonus Corporation',
+   0x000A93: u'W2 Networks, Inc.',
+   0x000A94: u'ShangHai cellink CO., LTD',
+   0x000A95: u'Apple Computer, Inc.',
+   0x000A96: u'MEWTEL TECHNOLOGY INC.',
+   0x000A97: u'SONICblue, Inc.',
+   0x000A98: u'M+F Gwinner GmbH & Co',
+   0x000A99: u'Dataradio Inc.',
+   0x000A9A: u'Aiptek International Inc',
+   0x000A9B: u'Towa Meccs Corporation',
+   0x000A9C: u'Server Technology, Inc.',
+   0x000A9D: u'King Young Technology Co. Ltd.',
+   0x000A9E: u'BroadWeb Corportation',
+   0x000A9F: u'Pannaway Technologies, Inc.',
+   0x000AA0: u'Cedar Point Communications',
+   0x000AA1: u'V V S Limited',
+   0x000AA2: u'SYSTEK INC.',
+   0x000AA3: u'SHIMAFUJI ELECTRIC CO.,LTD.',
+   0x000AA4: u'SHANGHAI SURVEILLANCE TECHNOLOGY CO,LTD',
+   0x000AA5: u'MAXLINK INDUSTRIES LIMITED',
+   0x000AA6: u'Hochiki Corporation',
+   0x000AA7: u'FEI Company',
+   0x000AA8: u'ePipe Pty. Ltd.',
+   0x000AA9: u'Brooks Automation GmbH',
+   0x000AAA: u'AltiGen Communications Inc.',
+   0x000AAB: u'TOYOTA MACS, INC.',
+   0x000AAC: u'TerraTec Electronic GmbH',
+   0x000AAD: u'Stargames Corporation',
+   0x000AAE: u'Rosemount Process Analytical',
+   0x000AAF: u'Pipal Systems',
+   0x000AB0: u'LOYTEC electronics GmbH',
+   0x000AB1: u'GENETEC Corporation',
+   0x000AB2: u'Fresnel Wireless Systems',
+   0x000AB3: u'Fa. GIRA',
+   0x000AB4: u'ETIC Telecommunications',
+   0x000AB5: u'Digital Electronic Network',
+   0x000AB6: u'COMPUNETIX, INC',
+   0x000AB7: u'Cisco Systems',
+   0x000AB8: u'Cisco Systems',
+   0x000AB9: u'Astera Technologies Corp.',
+   0x000ABA: u'Arcon Technology Limited',
+   0x000ABB: u'Taiwan Secom Co,. Ltd',
+   0x000ABC: u'Seabridge Ltd.',
+   0x000ABD: u'Rupprecht & Patashnick Co.',
+   0x000ABE: u'OPNET Technologies CO., LTD.',
+   0x000ABF: u'HIROTA SS',
+   0x000AC0: u'Fuyoh Video Industry CO., LTD.',
+   0x000AC1: u'Futuretel',
+   0x000AC2: u'FiberHome Telecommunication Technologies CO.,LTD',
+   0x000AC3: u'eM Technics Co., Ltd.',
+   0x000AC4: u'Daewoo Teletech Co., Ltd',
+   0x000AC5: u'Color Kinetics',
+   0x000AC6: u'Ceterus Networks, Inc.',
+   0x000AC7: u'Unication Group',
+   0x000AC8: u'ZPSYS CO.,LTD. (Planning&Management)',
+   0x000AC9: u'Zambeel Inc',
+   0x000ACA: u'YOKOYAMA SHOKAI CO.,Ltd.',
+   0x000ACB: u'XPAK MSA Group',
+   0x000ACC: u'Winnow Networks, Inc.',
+   0x000ACD: u'Sunrich Technology Limited',
+   0x000ACE: u'RADIANTECH, INC.',
+   0x000ACF: u'PROVIDEO Multimedia Co. Ltd.',
+   0x000AD0: u'Niigata Develoment Center,  F.I.T. Co., Ltd.',
+   0x000AD1: u'MWS',
+   0x000AD2: u'JEPICO Corporation',
+   0x000AD3: u'INITECH Co., Ltd',
+   0x000AD4: u'CoreBell Systems Inc.',
+   0x000AD5: u'Brainchild Electronic Co., Ltd.',
+   0x000AD6: u'BeamReach Networks',
+   0x000AD7: u'Origin ELECTRIC CO.,LTD.',
+   0x000AD8: u'IPCserv Technology Corp.',
+   0x000AD9: u'Sony Ericsson Mobile Communications AB',
+   0x000ADA: u'PRIVATE',
+   0x000ADB: u'SkyPilot Network, Inc',
+   0x000ADC: u'RuggedCom Inc.',
+   0x000ADD: u'InSciTek Microsystems, Inc.',
+   0x000ADE: u'Happy Communication Co., Ltd.',
+   0x000ADF: u'Gennum Corporation',
+   0x000AE0: u'Fujitsu Softek',
+   0x000AE1: u'EG Technology',
+   0x000AE2: u'Binatone Electronics International, Ltd',
+   0x000AE3: u'YANG MEI TECHNOLOGY CO., LTD',
+   0x000AE4: u'Wistron Corp.',
+   0x000AE5: u'ScottCare Corporation',
+   0x000AE6: u'Elitegroup Computer System Co. (ECS)',
+   0x000AE7: u'ELIOP S.A.',
+   0x000AE8: u'Cathay Roxus Information Technology Co. LTD',
+   0x000AE9: u'AirVast Technology Inc.',
+   0x000AEA: u'ADAM ELEKTRONIK LTD.STI.',
+   0x000AEB: u'Shenzhen Tp-Link Technology Co; Ltd.',
+   0x000AEC: u'Koatsu Gas Kogyo Co., Ltd.',
+   0x000AED: u'HARTING Vending G.m.b.H. & CO KG',
+   0x000AEE: u'GCD Hard- & Software GmbH',
+   0x000AEF: u'OTRUM ASA',
+   0x000AF0: u'SHIN-OH ELECTRONICS CO., LTD. R&D',
+   0x000AF1: u'Clarity Design, Inc.',
+   0x000AF2: u'NeoAxiom Corp.',
+   0x000AF3: u'Cisco Systems',
+   0x000AF4: u'Cisco Systems',
+   0x000AF5: u'Airgo Networks, Inc.',
+   0x000AF6: u'Computer Process Controls',
+   0x000AF7: u'Broadcom Corp.',
+   0x000AF8: u'American Telecare Inc.',
+   0x000AF9: u'HiConnect, Inc.',
+   0x000AFA: u'Traverse Technologies Australia',
+   0x000AFB: u'Ambri Limited',
+   0x000AFC: u'Core Tec Communications, LLC',
+   0x000AFD: u'Viking Electronic Services',
+   0x000AFE: u'NovaPal Ltd',
+   0x000AFF: u'Kilchherr Elektronik AG',
+   0x000B00: u'FUJIAN START COMPUTER EQUIPMENT CO.,LTD',
+   0x000B01: u'DAIICHI ELECTRONICS CO., LTD.',
+   0x000B02: u'Dallmeier electronic',
+   0x000B03: u'Taekwang Industrial Co., Ltd',
+   0x000B04: u'Volktek Corporation',
+   0x000B05: u'Pacific Broadband Networks',
+   0x000B06: u'Motorola BCS',
+   0x000B07: u'Voxpath Networks',
+   0x000B08: u'Pillar Data Systems',
+   0x000B09: u'Ifoundry Systems Singapore',
+   0x000B0A: u'dBm Optics',
+   0x000B0B: u'Corrent Corporation',
+   0x000B0C: u'Agile Systems Inc.',
+   0x000B0D: u'Air2U, Inc.',
+   0x000B0E: u'Trapeze Networks',
+   0x000B0F: u'Nyquist Industrial Control BV',
+   0x000B10: u'11wave Technonlogy Co.,Ltd',
+   0x000B11: u'HIMEJI ABC TRADING CO.,LTD.',
+   0x000B12: u'NURI Telecom Co., Ltd.',
+   0x000B13: u'ZETRON INC',
+   0x000B14: u'ViewSonic Corporation',
+   0x000B15: u'Platypus Technology',
+   0x000B16: u'Communication Machinery Corporation',
+   0x000B17: u'MKS Instruments',
+   0x000B18: u'PRIVATE',
+   0x000B19: u'Vernier Networks, Inc.',
+   0x000B1A: u'Teltone Corporation',
+   0x000B1B: u'Systronix, Inc.',
+   0x000B1C: u'SIBCO bv',
+   0x000B1D: u'LayerZero Power Systems, Inc.',
+   0x000B1E: u'KAPPA opto-electronics GmbH',
+   0x000B1F: u'I CON Computer Co.',
+   0x000B20: u'Hirata corporation',
+   0x000B21: u'G-Star Communications Inc.',
+   0x000B22: u'Environmental Systems and Services',
+   0x000B23: u'Siemens Subscriber Networks',
+   0x000B24: u'AirLogic',
+   0x000B25: u'Aeluros',
+   0x000B26: u'Wetek Corporation',
+   0x000B27: u'Scion Corporation',
+   0x000B28: u'Quatech Inc.',
+   0x000B29: u'LG Industrial Systems Co.,Ltd.',
+   0x000B2A: u'HOWTEL Co., Ltd.',
+   0x000B2B: u'HOSTNET CORPORATION',
+   0x000B2C: u'Eiki Industrial Co. Ltd.',
+   0x000B2D: u'Danfoss Inc.',
+   0x000B2E: u'Cal-Comp Electronics (Thailand) Public Company Limited Taipe',
+   0x000B2F: u'bplan GmbH',
+   0x000B30: u'Beijing Gongye Science & Technology Co.,Ltd',
+   0x000B31: u'Yantai ZhiYang Scientific and technology industry CO., LTD',
+   0x000B32: u'VORMETRIC, INC.',
+   0x000B33: u'Vivato',
+   0x000B34: u'ShangHai Broadband Technologies CO.LTD',
+   0x000B35: u'Quad Bit System co., Ltd.',
+   0x000B36: u'Productivity Systems, Inc.',
+   0x000B37: u'MANUFACTURE DES MONTRES ROLEX SA',
+   0x000B38: u'Knuerr AG',
+   0x000B39: u'Keisoku Giken Co.,Ltd.',
+   0x000B3A: u'QuStream Corporation',
+   0x000B3B: u'devolo AG',
+   0x000B3C: u'Cygnal Integrated Products, Inc.',
+   0x000B3D: u'CONTAL OK Ltd.',
+   0x000B3E: u'BittWare, Inc',
+   0x000B3F: u'Anthology Solutions Inc.',
+   0x000B40: u'OpNext Inc.',
+   0x000B41: u'Ing. Buero Dr. Beutlhauser',
+   0x000B42: u'commax Co., Ltd.',
+   0x000B43: u'Microscan Systems, Inc.',
+   0x000B44: u'Concord IDea Corp.',
+   0x000B45: u'Cisco',
+   0x000B46: u'Cisco',
+   0x000B47: u'Advanced Energy',
+   0x000B48: u'sofrel',
+   0x000B49: u'RF-Link System Inc.',
+   0x000B4A: u'Visimetrics (UK) Ltd',
+   0x000B4B: u'VISIOWAVE SA',
+   0x000B4C: u'Clarion (M) Sdn Bhd',
+   0x000B4D: u'Emuzed',
+   0x000B4E: u'VertexRSI Antenna Products Division',
+   0x000B4F: u'Verifone, INC.',
+   0x000B50: u'Oxygnet',
+   0x000B51: u'Micetek International Inc.',
+   0x000B52: u'JOYMAX ELECTRONICS CORP.',
+   0x000B53: u'INITIUM Co., Ltd.',
+   0x000B54: u'BiTMICRO Networks, Inc.',
+   0x000B55: u'ADInstruments',
+   0x000B56: u'Cybernetics',
+   0x000B57: u'Silicon Laboratories',
+   0x000B58: u'Astronautics C.A  LTD',
+   0x000B59: u'ScriptPro, LLC',
+   0x000B5A: u'HyperEdge',
+   0x000B5B: u'Rincon Research Corporation',
+   0x000B5C: u'Newtech Co.,Ltd',
+   0x000B5D: u'FUJITSU LIMITED',
+   0x000B5E: u'Audio Engineering Society Inc.',
+   0x000B5F: u'Cisco Systems',
+   0x000B60: u'Cisco Systems',
+   0x000B61: u'Friedrich Lütze GmbH &Co.',
+   0x000B62: u'Ingenieurbüro Ingo Mohnen',
+   0x000B63: u'Kaleidescape',
+   0x000B64: u'Kieback & Peter GmbH & Co KG',
+   0x000B65: u'Sy.A.C. srl',
+   0x000B66: u'Teralink Communications',
+   0x000B67: u'Topview Technology Corporation',
+   0x000B68: u'Addvalue Communications Pte Ltd',
+   0x000B69: u'Franke Finland Oy',
+   0x000B6A: u'Asiarock Incorporation',
+   0x000B6B: u'Wistron Neweb Corp.',
+   0x000B6C: u'Sychip Inc.',
+   0x000B6D: u'SOLECTRON JAPAN NAKANIIDA',
+   0x000B6E: u'Neff Instrument Corp.',
+   0x000B6F: u'Media Streaming Networks Inc',
+   0x000B70: u'Load Technology, Inc.',
+   0x000B71: u'Litchfield Communications Inc.',
+   0x000B72: u'Lawo AG',
+   0x000B73: u'Kodeos Communications',
+   0x000B74: u'Kingwave Technology Co., Ltd.',
+   0x000B75: u'Iosoft Ltd.',
+   0x000B76: u'ET&T Co. Ltd.',
+   0x000B77: u'Cogent Systems, Inc.',
+   0x000B78: u'TAIFATECH INC.',
+   0x000B79: u'X-COM, Inc.',
+   0x000B7A: u'Wave Science Inc.',
+   0x000B7B: u'Test-Um Inc.',
+   0x000B7C: u'Telex Communications',
+   0x000B7D: u'SOLOMON EXTREME INTERNATIONAL LTD.',
+   0x000B7E: u'SAGINOMIYA Seisakusho Inc.',
+   0x000B7F: u'OmniWerks',
+   0x000B80: u'Lycium Networks',
+   0x000B81: u'Kaparel Corporation',
+   0x000B82: u'Grandstream Networks, Inc.',
+   0x000B83: u'DATAWATT B.V.',
+   0x000B84: u'BODET',
+   0x000B85: u'Airespace, Inc.',
+   0x000B86: u'Aruba Networks',
+   0x000B87: u'American Reliance Inc.',
+   0x000B88: u'Vidisco ltd.',
+   0x000B89: u'Top Global Technology, Ltd.',
+   0x000B8A: u'MITEQ Inc.',
+   0x000B8B: u'KERAJET, S.A.',
+   0x000B8C: u'flextronics israel',
+   0x000B8D: u'Avvio Networks',
+   0x000B8E: u'Ascent Corporation',
+   0x000B8F: u'AKITA ELECTRONICS SYSTEMS CO.,LTD.',
+   0x000B90: u'Covaro Networks, Inc.',
+   0x000B91: u'Aglaia Gesellschaft für Bildverarbeitung und Kommunikation m',
+   0x000B92: u'Ascom Danmark A/S',
+   0x000B93: u'Barmag Electronic',
+   0x000B94: u'Digital Monitoring Products, Inc.',
+   0x000B95: u'eBet Gaming Systems Pty Ltd',
+   0x000B96: u'Innotrac Diagnostics Oy',
+   0x000B97: u'Matsushita Electric Industrial Co.,Ltd.',
+   0x000B98: u'NiceTechVision',
+   0x000B99: u'SensAble Technologies, Inc.',
+   0x000B9A: u'Shanghai Ulink Telecom Equipment Co. Ltd.',
+   0x000B9B: u'Sirius System Co, Ltd.',
+   0x000B9C: u'TriBeam Technologies, Inc.',
+   0x000B9D: u'TwinMOS Technologies Inc.',
+   0x000B9E: u'Yasing Technology Corp.',
+   0x000B9F: u'Neue ELSA GmbH',
+   0x000BA0: u'T&L Information Inc.',
+   0x000BA1: u'SYSCOM Ltd.',
+   0x000BA2: u'Sumitomo Electric Networks, Inc',
+   0x000BA3: u'Siemens AG, I&S',
+   0x000BA4: u'Shiron Satellite Communications Ltd. (1996)',
+   0x000BA5: u'Quasar Cipta Mandiri, PT',
+   0x000BA6: u'Miyakawa Electric Works Ltd.',
+   0x000BA7: u'Maranti Networks',
+   0x000BA8: u'HANBACK ELECTRONICS CO., LTD.',
+   0x000BA9: u'CloudShield Technologies, Inc.',
+   0x000BAA: u'Aiphone co.,Ltd',
+   0x000BAB: u'Advantech Technology (CHINA) Co., Ltd.',
+   0x000BAC: u'3Com Europe Ltd.',
+   0x000BAD: u'PC-PoS Inc.',
+   0x000BAE: u'Vitals System Inc.',
+   0x000BAF: u'WOOJU COMMUNICATIONS Co,.Ltd',
+   0x000BB0: u'Sysnet Telematica srl',
+   0x000BB1: u'Super Star Technology Co., Ltd.',
+   0x000BB2: u'SMALLBIG TECHNOLOGY',
+   0x000BB3: u'RiT technologies Ltd.',
+   0x000BB4: u'RDC Semiconductor Inc.,',
+   0x000BB5: u'nStor Technologies, Inc.',
+   0x000BB6: u'Mototech Inc.',
+   0x000BB7: u'Micro Systems Co.,Ltd.',
+   0x000BB8: u'Kihoku Electronic Co.',
+   0x000BB9: u'Imsys AB',
+   0x000BBA: u'Harmonic Broadband Access Networks',
+   0x000BBB: u'Etin Systems Co., Ltd',
+   0x000BBC: u'En Garde Systems, Inc.',
+   0x000BBD: u'Connexionz Limited',
+   0x000BBE: u'Cisco Systems',
+   0x000BBF: u'Cisco Systems',
+   0x000BC0: u'China IWNComm Co., Ltd.',
+   0x000BC1: u'Bay Microsystems, Inc.',
+   0x000BC2: u'Corinex Communication Corp.',
+   0x000BC3: u'Multiplex, Inc.',
+   0x000BC4: u'BIOTRONIK GmbH & Co',
+   0x000BC5: u'SMC Networks, Inc.',
+   0x000BC6: u'ISAC, Inc.',
+   0x000BC7: u'ICET S.p.A.',
+   0x000BC8: u'AirFlow Networks',
+   0x000BC9: u'Electroline Equipment',
+   0x000BCA: u'DATAVAN International Corporation',
+   0x000BCB: u'Fagor Automation , S. Coop',
+   0x000BCC: u'JUSAN, S.A.',
+   0x000BCD: u'Compaq (HP)',
+   0x000BCE: u'Free2move AB',
+   0x000BCF: u'AGFA NDT INC.',
+   0x000BD0: u'XiMeta Technology Americas Inc.',
+   0x000BD1: u'Aeronix, Inc.',
+   0x000BD2: u'Remopro Technology Inc.',
+   0x000BD3: u'cd3o',
+   0x000BD4: u'Beijing Wise Technology & Science Development Co.Ltd',
+   0x000BD5: u'Nvergence, Inc.',
+   0x000BD6: u'Paxton Access Ltd',
+   0x000BD7: u'MBB Gelma GmbH',
+   0x000BD8: u'Industrial Scientific Corp.',
+   0x000BD9: u'General Hydrogen',
+   0x000BDA: u'EyeCross Co.,Inc.',
+   0x000BDB: u'Dell ESG PCBA Test',
+   0x000BDC: u'AKCP',
+   0x000BDD: u'TOHOKU RICOH Co., LTD.',
+   0x000BDE: u'TELDIX GmbH',
+   0x000BDF: u'Shenzhen RouterD Networks Limited',
+   0x000BE0: u'SercoNet Ltd.',
+   0x000BE1: u'Nokia NET Product Operations',
+   0x000BE2: u'Lumenera Corporation',
+   0x000BE3: u'Key Stream Co., Ltd.',
+   0x000BE4: u'Hosiden Corporation',
+   0x000BE5: u'HIMS Korea Co., Ltd.',
+   0x000BE6: u'Datel Electronics',
+   0x000BE7: u'COMFLUX TECHNOLOGY INC.',
+   0x000BE8: u'AOIP',
+   0x000BE9: u'Actel Corporation',
+   0x000BEA: u'Zultys Technologies',
+   0x000BEB: u'Systegra AG',
+   0x000BEC: u'NIPPON ELECTRIC INSTRUMENT, INC.',
+   0x000BED: u'ELM Inc.',
+   0x000BEE: u'inc.jet, Incorporated',
+   0x000BEF: u'Code Corporation',
+   0x000BF0: u'MoTEX Products Co., Ltd.',
+   0x000BF1: u'LAP Laser Applikations',
+   0x000BF2: u'Chih-Kan Technology Co., Ltd.',
+   0x000BF3: u'BAE SYSTEMS',
+   0x000BF4: u'PRIVATE',
+   0x000BF5: u'Shanghai Sibo Telecom Technology Co.,Ltd',
+   0x000BF6: u'Nitgen Co., Ltd',
+   0x000BF7: u'NIDEK CO.,LTD',
+   0x000BF8: u'Infinera',
+   0x000BF9: u'Gemstone communications, Inc.',
+   0x000BFA: u'EXEMYS SRL',
+   0x000BFB: u'D-NET International Corporation',
+   0x000BFC: u'Cisco Systems',
+   0x000BFD: u'Cisco Systems',
+   0x000BFE: u'CASTEL Broadband Limited',
+   0x000BFF: u'Berkeley Camera Engineering',
+   0x000C00: u'BEB Industrie-Elektronik AG',
+   0x000C01: u'Abatron AG',
+   0x000C02: u'ABB Oy',
+   0x000C03: u'HDMI Licensing, LLC',
+   0x000C04: u'Tecnova',
+   0x000C05: u'RPA Reserch Co., Ltd.',
+   0x000C06: u'Nixvue Systems  Pte Ltd',
+   0x000C07: u'Iftest AG',
+   0x000C08: u'HUMEX Technologies Corp.',
+   0x000C09: u'Hitachi IE Systems Co., Ltd',
+   0x000C0A: u'Guangdong Province Electronic Technology Research Institute',
+   0x000C0B: u'Broadbus Technologies',
+   0x000C0C: u'APPRO TECHNOLOGY INC.',
+   0x000C0D: u'Communications & Power Industries / Satcom Division',
+   0x000C0E: u'XtremeSpectrum, Inc.',
+   0x000C0F: u'Techno-One Co., Ltd',
+   0x000C10: u'PNI Corporation',
+   0x000C11: u'NIPPON DEMPA CO.,LTD.',
+   0x000C12: u'Micro-Optronic-Messtechnik GmbH',
+   0x000C13: u'MediaQ',
+   0x000C14: u'Diagnostic Instruments, Inc.',
+   0x000C15: u'CyberPower Systems, Inc.',
+   0x000C16: u'Concorde Microsystems Inc.',
+   0x000C17: u'AJA Video Systems Inc',
+   0x000C18: u'Zenisu Keisoku Inc.',
+   0x000C19: u'Telio Communications GmbH',
+   0x000C1A: u'Quest Technical Solutions Inc.',
+   0x000C1B: u'ORACOM Co, Ltd.',
+   0x000C1C: u'MicroWeb Co., Ltd.',
+   0x000C1D: u'Mettler & Fuchs AG',
+   0x000C1E: u'Global Cache',
+   0x000C1F: u'Glimmerglass Networks',
+   0x000C20: u'Fi WIn, Inc.',
+   0x000C21: u'Faculty of Science and Technology, Keio University',
+   0x000C22: u'Double D Electronics Ltd',
+   0x000C23: u'Beijing Lanchuan Tech. Co., Ltd.',
+   0x000C24: u'ANATOR',
+   0x000C25: u'Allied Telesyn Networks',
+   0x000C26: u'Weintek Labs. Inc.',
+   0x000C27: u'Sammy Corporation',
+   0x000C28: u'RIFATRON',
+   0x000C29: u'VMware, Inc.',
+   0x000C2A: u'OCTTEL Communication Co., Ltd.',
+   0x000C2B: u'ELIAS Technology, Inc.',
+   0x000C2C: u'Enwiser Inc.',
+   0x000C2D: u'FullWave Technology Co., Ltd.',
+   0x000C2E: u'Openet information technology(shenzhen) Co., Ltd.',
+   0x000C2F: u'SeorimTechnology Co.,Ltd.',
+   0x000C30: u'Cisco',
+   0x000C31: u'Cisco',
+   0x000C32: u'Avionic Design Development GmbH',
+   0x000C33: u'Compucase Enterprise Co. Ltd.',
+   0x000C34: u'Vixen Co., Ltd.',
+   0x000C35: u'KaVo Dental GmbH & Co. KG',
+   0x000C36: u'SHARP TAKAYA ELECTRONICS INDUSTRY CO.,LTD.',
+   0x000C37: u'Geomation, Inc.',
+   0x000C38: u'TelcoBridges Inc.',
+   0x000C39: u'Sentinel Wireless Inc.',
+   0x000C3A: u'Oxance',
+   0x000C3B: u'Orion Electric Co., Ltd.',
+   0x000C3C: u'MediaChorus, Inc.',
+   0x000C3D: u'Glsystech Co., Ltd.',
+   0x000C3E: u'Crest Audio',
+   0x000C3F: u'Cogent Defence & Security Networks,',
+   0x000C40: u'Altech Controls',
+   0x000C41: u'The Linksys Group, Inc.',
+   0x000C42: u'Routerboard.com',
+   0x000C43: u'Ralink Technology, Corp.',
+   0x000C44: u'Automated Interfaces, Inc.',
+   0x000C45: u'Animation Technologies Inc.',
+   0x000C46: u'Allied Telesyn Inc.',
+   0x000C47: u'SK Teletech(R&D Planning Team)',
+   0x000C48: u'QoStek Corporation',
+   0x000C49: u'Dangaard Telecom RTC Division A/S',
+   0x000C4A: u'Cygnus Microsystems Private Limited',
+   0x000C4B: u'Cheops Elektronik',
+   0x000C4C: u'Arcor AG&Co.',
+   0x000C4D: u'ACRA CONTROL',
+   0x000C4E: u'Winbest Technology CO,LT',
+   0x000C4F: u'UDTech Japan Corporation',
+   0x000C50: u'Seagate Technology',
+   0x000C51: u'Scientific Technologies Inc.',
+   0x000C52: u'Roll Systems Inc.',
+   0x000C53: u'PRIVATE',
+   0x000C54: u'Pedestal Networks, Inc',
+   0x000C55: u'Microlink Communications Inc.',
+   0x000C56: u'Megatel Computer (1986) Corp.',
+   0x000C57: u'MACKIE Engineering Services Belgium BVBA',
+   0x000C58: u'M&S Systems',
+   0x000C59: u'Indyme Electronics, Inc.',
+   0x000C5A: u'IBSmm Industrieelektronik Multimedia',
+   0x000C5B: u'HANWANG TECHNOLOGY CO.,LTD',
+   0x000C5C: u'GTN Systems B.V.',
+   0x000C5D: u'CHIC TECHNOLOGY (CHINA) CORP.',
+   0x000C5E: u'Calypso Medical',
+   0x000C5F: u'Avtec, Inc.',
+   0x000C60: u'ACM Systems',
+   0x000C61: u'AC Tech corporation DBA Advanced Digital',
+   0x000C62: u'ABB Automation Technology Products AB, Control',
+   0x000C63: u'Zenith Electronics Corporation',
+   0x000C64: u'X2 MSA Group',
+   0x000C65: u'Sunin Telecom',
+   0x000C66: u'Pronto Networks Inc',
+   0x000C67: u'OYO ELECTRIC CO.,LTD',
+   0x000C68: u'SigmaTel, Inc.',
+   0x000C69: u'National Radio Astronomy Observatory',
+   0x000C6A: u'MBARI',
+   0x000C6B: u'Kurz Industrie-Elektronik GmbH',
+   0x000C6C: u'Elgato Systems LLC',
+   0x000C6D: u'BOC Edwards',
+   0x000C6E: u'ASUSTEK COMPUTER INC.',
+   0x000C6F: u'Amtek system co.,LTD.',
+   0x000C70: u'ACC GmbH',
+   0x000C71: u'Wybron, Inc',
+   0x000C72: u'Tempearl Industrial Co., Ltd.',
+   0x000C73: u'TELSON ELECTRONICS CO., LTD',
+   0x000C74: u'RIVERTEC CORPORATION',
+   0x000C75: u'Oriental integrated electronics. LTD',
+   0x000C76: u'MICRO-STAR INTERNATIONAL CO., LTD.',
+   0x000C77: u'Life Racing Ltd',
+   0x000C78: u'In-Tech Electronics Limited',
+   0x000C79: u'Extel Communications P/L',
+   0x000C7A: u'DaTARIUS Technologies GmbH',
+   0x000C7B: u'ALPHA PROJECT Co.,Ltd.',
+   0x000C7C: u'Internet Information Image Inc.',
+   0x000C7D: u'TEIKOKU ELECTRIC MFG. CO., LTD',
+   0x000C7E: u'Tellium Incorporated',
+   0x000C7F: u'synertronixx GmbH',
+   0x000C80: u'Opelcomm Inc.',
+   0x000C81: u'Nulec Industries Pty Ltd',
+   0x000C82: u'NETWORK TECHNOLOGIES INC',
+   0x000C83: u'Logical Solutions',
+   0x000C84: u'Eazix, Inc.',
+   0x000C85: u'Cisco Systems',
+   0x000C86: u'Cisco Systems',
+   0x000C87: u'ATI',
+   0x000C88: u'Apache Micro Peripherals, Inc.',
+   0x000C89: u'AC Electric Vehicles, Ltd.',
+   0x000C8A: u'Bose Corporation',
+   0x000C8B: u'Connect Tech Inc',
+   0x000C8C: u'KODICOM CO.,LTD.',
+   0x000C8D: u'MATRIX VISION GmbH',
+   0x000C8E: u'Mentor Engineering Inc',
+   0x000C8F: u'Nergal s.r.l.',
+   0x000C90: u'Octasic Inc.',
+   0x000C91: u'Riverhead Networks Inc.',
+   0x000C92: u'WolfVision Gmbh',
+   0x000C93: u'Xeline Co., Ltd.',
+   0x000C94: u'United Electronic Industries, Inc.',
+   0x000C95: u'PrimeNet',
+   0x000C96: u'OQO, Inc.',
+   0x000C97: u'NV ADB TTV Technologies SA',
+   0x000C98: u'LETEK Communications Inc.',
+   0x000C99: u'HITEL LINK Co.,Ltd',
+   0x000C9A: u'Hitech Electronics Corp.',
+   0x000C9B: u'EE Solutions, Inc',
+   0x000C9C: u'Chongho information & communications',
+   0x000C9D: u'AirWalk Communications, Inc.',
+   0x000C9E: u'MemoryLink Corp.',
+   0x000C9F: u'NKE Corporation',
+   0x000CA0: u'StorCase Technology, Inc.',
+   0x000CA1: u'SIGMACOM Co., LTD.',
+   0x000CA2: u'Scopus Network Technologies Ltd',
+   0x000CA3: u'Rancho Technology, Inc.',
+   0x000CA4: u'Prompttec Product Management GmbH',
+   0x000CA5: u'Naman NZ LTd',
+   0x000CA6: u'Mintera Corporation',
+   0x000CA7: u'Metro (Suzhou) Technologies Co., Ltd.',
+   0x000CA8: u'Garuda Networks Corporation',
+   0x000CA9: u'Ebtron Inc.',
+   0x000CAA: u'Cubic Transportation Systems Inc',
+   0x000CAB: u'COMMEND International',
+   0x000CAC: u'Citizen Watch Co., Ltd.',
+   0x000CAD: u'BTU International',
+   0x000CAE: u'Ailocom Oy',
+   0x000CAF: u'TRI TERM CO.,LTD.',
+   0x000CB0: u'Star Semiconductor Corporation',
+   0x000CB1: u'Salland Engineering (Europe) BV',
+   0x000CB2: u'safei Co., Ltd.',
+   0x000CB3: u'ROUND Co.,Ltd.',
+   0x000CB4: u'AutoCell Laboratories, Inc.',
+   0x000CB5: u'Premier Technolgies, Inc',
+   0x000CB6: u'NANJING SEU MOBILE & INTERNET TECHNOLOGY CO.,LTD',
+   0x000CB7: u'Nanjing Huazhuo Electronics Co., Ltd.',
+   0x000CB8: u'MEDION AG',
+   0x000CB9: u'LEA',
+   0x000CBA: u'Jamex',
+   0x000CBB: u'ISKRAEMECO',
+   0x000CBC: u'Iscutum',
+   0x000CBD: u'Interface Masters, Inc',
+   0x000CBE: u'PRIVATE',
+   0x000CBF: u'Holy Stone Ent. Co., Ltd.',
+   0x000CC0: u'Genera Oy',
+   0x000CC1: u'Cooper Industries Inc.',
+   0x000CC2: u'PRIVATE',
+   0x000CC3: u'BeWAN systems',
+   0x000CC4: u'Tiptel AG',
+   0x000CC5: u'Nextlink Co., Ltd.',
+   0x000CC6: u'Ka-Ro electronics GmbH',
+   0x000CC7: u'Intelligent Computer Solutions Inc.',
+   0x000CC8: u'Xytronix Research & Design, Inc.',
+   0x000CC9: u'ILWOO DATA & TECHNOLOGY CO.,LTD',
+   0x000CCA: u'Hitachi Global Storage Technologies',
+   0x000CCB: u'Design Combus Ltd',
+   0x000CCC: u'Aeroscout Ltd.',
+   0x000CCD: u'IEC - TC57',
+   0x000CCE: u'Cisco Systems',
+   0x000CCF: u'Cisco Systems',
+   0x000CD0: u'Symetrix',
+   0x000CD1: u'SFOM Technology Corp.',
+   0x000CD2: u'Schaffner EMV AG',
+   0x000CD3: u'Prettl Elektronik Radeberg GmbH',
+   0x000CD4: u'Positron Public Safety Systems inc.',
+   0x000CD5: u'Passave Inc.',
+   0x000CD6: u'PARTNER TECH',
+   0x000CD7: u'Nallatech Ltd',
+   0x000CD8: u'M. K. Juchheim GmbH & Co',
+   0x000CD9: u'Itcare Co., Ltd',
+   0x000CDA: u'FreeHand Systems, Inc.',
+   0x000CDB: u'Foundry Networks',
+   0x000CDC: u'BECS Technology, Inc',
+   0x000CDD: u'AOS Technologies AG',
+   0x000CDE: u'ABB STOTZ-KONTAKT GmbH',
+   0x000CDF: u'PULNiX America, Inc',
+   0x000CE0: u'Trek Diagnostics Inc.',
+   0x000CE1: u'The Open Group',
+   0x000CE2: u'Rolls-Royce',
+   0x000CE3: u'Option International N.V.',
+   0x000CE4: u'NeuroCom International, Inc.',
+   0x000CE5: u'Motorola BCS',
+   0x000CE6: u'Meru Networks Inc',
+   0x000CE7: u'MediaTek Inc.',
+   0x000CE8: u'GuangZhou AnJuBao Co., Ltd',
+   0x000CE9: u'BLOOMBERG L.P.',
+   0x000CEA: u'aphona Kommunikationssysteme',
+   0x000CEB: u'CNMP Networks, Inc.',
+   0x000CEC: u'Spectracom Corp.',
+   0x000CED: u'Real Digital Media',
+   0x000CEE: u'jp-embedded',
+   0x000CEF: u'Open Networks Engineering Ltd',
+   0x000CF0: u'M & N GmbH',
+   0x000CF1: u'Intel Corporation',
+   0x000CF2: u'GAMESA EÓLICA',
+   0x000CF3: u'CALL IMAGE SA',
+   0x000CF4: u'AKATSUKI ELECTRIC MFG.CO.,LTD.',
+   0x000CF5: u'InfoExpress',
+   0x000CF6: u'Sitecom Europe BV',
+   0x000CF7: u'Nortel Networks',
+   0x000CF8: u'Nortel Networks',
+   0x000CF9: u'ITT Flygt AB',
+   0x000CFA: u'Digital Systems Corp',
+   0x000CFB: u'Korea Network Systems',
+   0x000CFC: u'S2io Technologies Corp',
+   0x000CFD: u'PRIVATE',
+   0x000CFE: u'Grand Electronic Co., Ltd',
+   0x000CFF: u'MRO-TEK LIMITED',
+   0x000D00: u'Seaway Networks Inc.',
+   0x000D01: u'P&E Microcomputer Systems, Inc.',
+   0x000D02: u'NEC AccessTechnica,Ltd',
+   0x000D03: u'Matrics, Inc.',
+   0x000D04: u'Foxboro Eckardt Development GmbH',
+   0x000D05: u'cybernet manufacturing inc.',
+   0x000D06: u'Compulogic Limited',
+   0x000D07: u'Calrec Audio Ltd',
+   0x000D08: u'AboveCable, Inc.',
+   0x000D09: u'Yuehua(Zhuhai) Electronic CO. LTD',
+   0x000D0A: u'Projectiondesign as',
+   0x000D0B: u'Buffalo Inc.',
+   0x000D0C: u'MDI Security Systems',
+   0x000D0D: u'ITSupported, LLC',
+   0x000D0E: u'Inqnet Systems, Inc.',
+   0x000D0F: u'Finlux Ltd',
+   0x000D10: u'Embedtronics Oy',
+   0x000D11: u'DENTSPLY - Gendex',
+   0x000D12: u'AXELL Corporation',
+   0x000D13: u'Wilhelm Rutenbeck GmbH&Co.',
+   0x000D14: u'Vtech Innovation LP dba Advanced American Telephones',
+   0x000D15: u'Voipac s.r.o.',
+   0x000D16: u'UHS Systems Pty Ltd',
+   0x000D17: u'Turbo Networks Co.Ltd',
+   0x000D18: u'Sunitec Enterprise Co., Ltd.',
+   0x000D19: u'ROBE Show lighting',
+   0x000D1A: u'Mustek System Inc.',
+   0x000D1B: u'Kyoto Electronics Manufacturing Co., Ltd.',
+   0x000D1C: u'I2E TELECOM',
+   0x000D1D: u'HIGH-TEK HARNESS ENT. CO., LTD.',
+   0x000D1E: u'Control Techniques',
+   0x000D1F: u'AV Digital',
+   0x000D20: u'ASAHIKASEI TECHNOSYSTEM CO.,LTD.',
+   0x000D21: u'WISCORE Inc.',
+   0x000D22: u'Unitronics',
+   0x000D23: u'Smart Solution, Inc',
+   0x000D24: u'SENTEC E&E CO., LTD.',
+   0x000D25: u'SANDEN CORPORATION',
+   0x000D26: u'Primagraphics Limited',
+   0x000D27: u'MICROPLEX Printware AG',
+   0x000D28: u'Cisco',
+   0x000D29: u'Cisco',
+   0x000D2A: u'Scanmatic AS',
+   0x000D2B: u'Racal Instruments',
+   0x000D2C: u'Patapsco Designs Ltd',
+   0x000D2D: u'NCT Deutschland GmbH',
+   0x000D2E: u'Matsushita Avionics Systems Corporation',
+   0x000D2F: u'AIN Comm.Tech.Co., LTD',
+   0x000D30: u'IceFyre Semiconductor',
+   0x000D31: u'Compellent Technologies, Inc.',
+   0x000D32: u'DispenseSource, Inc.',
+   0x000D33: u'Prediwave Corp.',
+   0x000D34: u'Shell International Exploration and Production, Inc.',
+   0x000D35: u'PAC International Ltd',
+   0x000D36: u'Wu Han Routon Electronic Co., Ltd',
+   0x000D37: u'WIPLUG',
+   0x000D38: u'NISSIN INC.',
+   0x000D39: u'Network Electronics',
+   0x000D3A: u'Microsoft Corp.',
+   0x000D3B: u'Microelectronics Technology Inc.',
+   0x000D3C: u'i.Tech Dynamic Ltd',
+   0x000D3D: u'Hammerhead Systems, Inc.',
+   0x000D3E: u'APLUX Communications Ltd.',
+   0x000D3F: u'VXI Technology',
+   0x000D40: u'Verint Loronix Video Solutions',
+   0x000D41: u'Siemens AG ICM MP UC RD IT KLF1',
+   0x000D42: u'Newbest Development Limited',
+   0x000D43: u'DRS Tactical Systems Inc.',
+   0x000D44: u'PRIVATE',
+   0x000D45: u'Tottori SANYO Electric Co., Ltd.',
+   0x000D46: u'SSD Drives, Inc.',
+   0x000D47: u'Collex',
+   0x000D48: u'AEWIN Technologies Co., Ltd.',
+   0x000D49: u'Triton Systems of Delaware, Inc.',
+   0x000D4A: u'Steag ETA-Optik',
+   0x000D4B: u'Roku, LLC',
+   0x000D4C: u'Outline Electronics Ltd.',
+   0x000D4D: u'Ninelanes',
+   0x000D4E: u'NDR Co.,LTD.',
+   0x000D4F: u'Kenwood Corporation',
+   0x000D50: u'Galazar Networks',
+   0x000D51: u'DIVR Systems, Inc.',
+   0x000D52: u'Comart system',
+   0x000D53: u'Beijing 5w Communication Corp.',
+   0x000D54: u'3Com Europe Ltd',
+   0x000D55: u'SANYCOM Technology Co.,Ltd',
+   0x000D56: u'Dell PCBA Test',
+   0x000D57: u'Fujitsu I-Network Systems Limited.',
+   0x000D58: u'PRIVATE',
+   0x000D59: u'Amity Systems, Inc.',
+   0x000D5A: u'Tiesse SpA',
+   0x000D5B: u'Smart Empire Investments Limited',
+   0x000D5C: u'Robert Bosch GmbH, VT-ATMO',
+   0x000D5D: u'Raritan Computer, Inc',
+   0x000D5E: u'NEC CustomTechnica, Ltd.',
+   0x000D5F: u'Minds Inc',
+   0x000D60: u'IBM Corporation',
+   0x000D61: u'Giga-Byte Technology Co., Ltd.',
+   0x000D62: u'Funkwerk Dabendorf GmbH',
+   0x000D63: u'DENT Instruments, Inc.',
+   0x000D64: u'COMAG Handels AG',
+   0x000D65: u'Cisco Systems',
+   0x000D66: u'Cisco Systems',
+   0x000D67: u'BelAir Networks Inc.',
+   0x000D68: u'Vinci Systems, Inc.',
+   0x000D69: u'TMT&D Corporation',
+   0x000D6A: u'Redwood Technologies LTD',
+   0x000D6B: u'Mita-Teknik A/S',
+   0x000D6C: u'M-Audio',
+   0x000D6D: u'K-Tech Devices Corp.',
+   0x000D6E: u'K-Patents Oy',
+   0x000D6F: u'Ember Corporation',
+   0x000D70: u'Datamax Corporation',
+   0x000D71: u'boca systems',
+   0x000D72: u'2Wire, Inc',
+   0x000D73: u'Technical Support, Inc.',
+   0x000D74: u'Sand Network Systems, Inc.',
+   0x000D75: u'Kobian Pte Ltd - Taiwan Branch',
+   0x000D76: u'Hokuto Denshi Co,. Ltd.',
+   0x000D77: u'FalconStor Software',
+   0x000D78: u'Engineering & Security',
+   0x000D79: u'Dynamic Solutions Co,.Ltd.',
+   0x000D7A: u'DiGATTO Asia Pacific Pte Ltd',
+   0x000D7B: u'Consensys Computers Inc.',
+   0x000D7C: u'Codian Ltd',
+   0x000D7D: u'Afco Systems',
+   0x000D7E: u'Axiowave Networks, Inc.',
+   0x000D7F: u'MIDAS  COMMUNICATION TECHNOLOGIES PTE LTD ( Foreign Branch)',
+   0x000D80: u'Online Development Inc',
+   0x000D81: u'Pepperl+Fuchs GmbH',
+   0x000D82: u'PHS srl',
+   0x000D83: u'Sanmina-SCI Hungary  Ltd.',
+   0x000D84: u'Makus Inc.',
+   0x000D85: u'Tapwave, Inc.',
+   0x000D86: u'Huber + Suhner AG',
+   0x000D87: u'Elitegroup Computer System Co. (ECS)',
+   0x000D88: u'D-Link Corporation',
+   0x000D89: u'Bils Technology Inc',
+   0x000D8A: u'Winners Electronics Co., Ltd.',
+   0x000D8B: u'T&D Corporation',
+   0x000D8C: u'Shanghai Wedone Digital Ltd. CO.',
+   0x000D8D: u'ProLinx Communication Gateways, Inc.',
+   0x000D8E: u'Koden Electronics Co., Ltd.',
+   0x000D8F: u'King Tsushin Kogyo Co., LTD.',
+   0x000D90: u'Factum Electronics AB',
+   0x000D91: u'Eclipse (HQ Espana) S.L.',
+   0x000D92: u'Arima Communication Corporation',
+   0x000D93: u'Apple Computer',
+   0x000D94: u'AFAR Communications,Inc',
+   0x000D95: u'Opti-cell, Inc.',
+   0x000D96: u'Vtera Technology Inc.',
+   0x000D97: u'Tropos Networks, Inc.',
+   0x000D98: u'S.W.A.C. Schmitt-Walter Automation Consult GmbH',
+   0x000D99: u'Orbital Sciences Corp.; Launch Systems Group',
+   0x000D9A: u'INFOTEC LTD',
+   0x000D9B: u'Heraeus Electro-Nite International N.V.',
+   0x000D9C: u'Elan GmbH & Co KG',
+   0x000D9D: u'Hewlett Packard',
+   0x000D9E: u'TOKUDEN OHIZUMI SEISAKUSYO Co.,Ltd.',
+   0x000D9F: u'RF Micro Devices',
+   0x000DA0: u'NEDAP N.V.',
+   0x000DA1: u'MIRAE ITS Co.,LTD.',
+   0x000DA2: u'Infrant Technologies, Inc.',
+   0x000DA3: u'Emerging Technologies Limited',
+   0x000DA4: u'DOSCH & AMAND SYSTEMS AG',
+   0x000DA5: u'Fabric7 Systems, Inc',
+   0x000DA6: u'Universal Switching Corporation',
+   0x000DA7: u'PRIVATE',
+   0x000DA8: u'Teletronics Technology Corporation',
+   0x000DA9: u'T.E.A.M. S.L.',
+   0x000DAA: u'S.A.Tehnology co.,Ltd.',
+   0x000DAB: u'Parker Hannifin GmbH Electromechanical Division Europe',
+   0x000DAC: u'Japan CBM Corporation',
+   0x000DAD: u'Dataprobe Inc',
+   0x000DAE: u'SAMSUNG HEAVY INDUSTRIES CO., LTD.',
+   0x000DAF: u'Plexus Corp (UK) Ltd',
+   0x000DB0: u'Olym-tech Co.,Ltd.',
+   0x000DB1: u'Japan Network Service Co., Ltd.',
+   0x000DB2: u'Ammasso, Inc.',
+   0x000DB3: u'SDO Communication Corperation',
+   0x000DB4: u'NETASQ',
+   0x000DB5: u'GLOBALSAT TECHNOLOGY CORPORATION',
+   0x000DB6: u'Teknovus, Inc.',
+   0x000DB7: u'SANKO ELECTRIC CO,.LTD',
+   0x000DB8: u'SCHILLER AG',
+   0x000DB9: u'PC Engines GmbH',
+   0x000DBA: u'Océ Document Technologies GmbH',
+   0x000DBB: u'Nippon Dentsu Co.,Ltd.',
+   0x000DBC: u'Cisco Systems',
+   0x000DBD: u'Cisco Systems',
+   0x000DBE: u'Bel Fuse Europe Ltd.,UK',
+   0x000DBF: u'TekTone Sound & Signal Mfg., Inc.',
+   0x000DC0: u'Spagat AS',
+   0x000DC1: u'SafeWeb Inc',
+   0x000DC2: u'PRIVATE',
+   0x000DC3: u'First Communication, Inc.',
+   0x000DC4: u'Emcore Corporation',
+   0x000DC5: u'EchoStar International Corporation',
+   0x000DC6: u'DigiRose Technology Co., Ltd.',
+   0x000DC7: u'COSMIC ENGINEERING INC.',
+   0x000DC8: u'AirMagnet, Inc',
+   0x000DC9: u'THALES Elektronik Systeme GmbH',
+   0x000DCA: u'Tait Electronics',
+   0x000DCB: u'Petcomkorea Co., Ltd.',
+   0x000DCC: u'NEOSMART Corp.',
+   0x000DCD: u'GROUPE TXCOM',
+   0x000DCE: u'Dynavac Technology Pte Ltd',
+   0x000DCF: u'Cidra Corp.',
+   0x000DD0: u'TetraTec Instruments GmbH',
+   0x000DD1: u'Stryker Corporation',
+   0x000DD2: u'Simrad Optronics ASA',
+   0x000DD3: u'SAMWOO Telecommunication Co.,Ltd.',
+   0x000DD4: u'Revivio Inc.',
+   0x000DD5: u'O\'RITE TECHNOLOGY CO.,LTD',
+   0x000DD6: u'ITI    LTD',
+   0x000DD7: u'Bright',
+   0x000DD8: u'BBN',
+   0x000DD9: u'Anton Paar GmbH',
+   0x000DDA: u'ALLIED TELESIS K.K.',
+   0x000DDB: u'AIRWAVE TECHNOLOGIES INC.',
+   0x000DDC: u'VAC',
+   0x000DDD: u'PROFÝLO TELRA ELEKTRONÝK SANAYÝ VE TÝCARET A.Þ.',
+   0x000DDE: u'Joyteck Co., Ltd.',
+   0x000DDF: u'Japan Image & Network Inc.',
+   0x000DE0: u'ICPDAS Co.,LTD',
+   0x000DE1: u'Control Products, Inc.',
+   0x000DE2: u'CMZ Sistemi Elettronici',
+   0x000DE3: u'AT Sweden AB',
+   0x000DE4: u'DIGINICS, Inc.',
+   0x000DE5: u'Samsung Thales',
+   0x000DE6: u'YOUNGBO ENGINEERING CO.,LTD',
+   0x000DE7: u'Snap-on OEM Group',
+   0x000DE8: u'Nasaco Electronics Pte. Ltd',
+   0x000DE9: u'Napatech Aps',
+   0x000DEA: u'Kingtel Telecommunication Corp.',
+   0x000DEB: u'CompXs Limited',
+   0x000DEC: u'Cisco Systems',
+   0x000DED: u'Cisco Systems',
+   0x000DEE: u'Andrew RF Power Amplifier Group',
+   0x000DEF: u'Soc. Coop. Bilanciai',
+   0x000DF0: u'QCOM TECHNOLOGY INC.',
+   0x000DF1: u'IONIX INC.',
+   0x000DF2: u'PRIVATE',
+   0x000DF3: u'Asmax Solutions',
+   0x000DF4: u'Watertek Co.',
+   0x000DF5: u'Teletronics International Inc.',
+   0x000DF6: u'Technology Thesaurus Corp.',
+   0x000DF7: u'Space Dynamics Lab',
+   0x000DF8: u'ORGA Kartensysteme GmbH',
+   0x000DF9: u'NDS Limited',
+   0x000DFA: u'Micro Control Systems Ltd.',
+   0x000DFB: u'Komax AG',
+   0x000DFC: u'ITFOR Inc. resarch and development',
+   0x000DFD: u'Huges Hi-Tech Inc.,',
+   0x000DFE: u'Hauppauge Computer Works, Inc.',
+   0x000DFF: u'CHENMING MOLD INDUSTRY CORP.',
+   0x000E00: u'Atrie',
+   0x000E01: u'ASIP Technologies Inc.',
+   0x000E02: u'Advantech AMT Inc.',
+   0x000E03: u'Emulex',
+   0x000E04: u'CMA/Microdialysis AB',
+   0x000E05: u'WIRELESS MATRIX CORP.',
+   0x000E06: u'Team Simoco Ltd',
+   0x000E07: u'Sony Ericsson Mobile Communications AB',
+   0x000E08: u'Sipura Technology, Inc.',
+   0x000E09: u'Shenzhen Coship Software Co.,LTD.',
+   0x000E0A: u'SAKUMA DESIGN OFFICE',
+   0x000E0B: u'Netac Technology Co., Ltd.',
+   0x000E0C: u'Intel Corporation',
+   0x000E0D: u'HESCH Schröder GmbH',
+   0x000E0E: u'ESA elettronica S.P.A.',
+   0x000E0F: u'ERMME',
+   0x000E10: u'PRIVATE',
+   0x000E11: u'BDT Büro- und Datentechnik GmbH & Co. KG',
+   0x000E12: u'Adaptive Micro Systems Inc.',
+   0x000E13: u'Accu-Sort Systems inc.',
+   0x000E14: u'Visionary Solutions, Inc.',
+   0x000E15: u'Tadlys LTD',
+   0x000E16: u'SouthWing',
+   0x000E17: u'PRIVATE',
+   0x000E18: u'MyA Technology',
+   0x000E19: u'LogicaCMG Pty Ltd',
+   0x000E1A: u'JPS Communications',
+   0x000E1B: u'IAV GmbH',
+   0x000E1C: u'Hach Company',
+   0x000E1D: u'ARION Technology Inc.',
+   0x000E1E: u'PRIVATE',
+   0x000E1F: u'TCL Networks Equipment Co., Ltd.',
+   0x000E20: u'PalmSource, Inc.',
+   0x000E21: u'MTU Friedrichshafen GmbH',
+   0x000E22: u'PRIVATE',
+   0x000E23: u'Incipient, Inc.',
+   0x000E24: u'Huwell Technology Inc.',
+   0x000E25: u'Hannae Technology Co., Ltd',
+   0x000E26: u'Gincom Technology Corp.',
+   0x000E27: u'Crere Networks, Inc.',
+   0x000E28: u'Dynamic Ratings P/L',
+   0x000E29: u'Shester Communications Inc',
+   0x000E2A: u'PRIVATE',
+   0x000E2B: u'Safari Technologies',
+   0x000E2C: u'Netcodec co.',
+   0x000E2D: u'Hyundai Digital Technology Co.,Ltd.',
+   0x000E2E: u'Edimax Technology Co., Ltd.',
+   0x000E2F: u'Disetronic Medical Systems AG',
+   0x000E30: u'AERAS Networks, Inc.',
+   0x000E31: u'Olympus BioSystems GmbH',
+   0x000E32: u'Kontron Medical',
+   0x000E33: u'Shuko Electronics Co.,Ltd',
+   0x000E34: u'NexGen City, LP',
+   0x000E35: u'Intel Corp',
+   0x000E36: u'HEINESYS, Inc.',
+   0x000E37: u'Harms & Wende GmbH & Co.KG',
+   0x000E38: u'Cisco Systems',
+   0x000E39: u'Cisco Systems',
+   0x000E3A: u'Cirrus Logic',
+   0x000E3B: u'Hawking Technologies, Inc.',
+   0x000E3C: u'TransAct Technoloiges Inc.',
+   0x000E3D: u'Televic N.V.',
+   0x000E3E: u'Sun Optronics Inc',
+   0x000E3F: u'Soronti, Inc.',
+   0x000E40: u'Nortel Networks',
+   0x000E41: u'NIHON MECHATRONICS CO.,LTD.',
+   0x000E42: u'Motic Incoporation Ltd.',
+   0x000E43: u'G-Tek Electronics Sdn. Bhd.',
+   0x000E44: u'Digital 5, Inc.',
+   0x000E45: u'Beijing Newtry Electronic Technology Ltd',
+   0x000E46: u'Niigata Seimitsu Co.,Ltd.',
+   0x000E47: u'NCI System Co.,Ltd.',
+   0x000E48: u'Lipman TransAction Solutions',
+   0x000E49: u'Forsway Scandinavia AB',
+   0x000E4A: u'Changchun Huayu WEBPAD Co.,LTD',
+   0x000E4B: u'atrium c and i',
+   0x000E4C: u'Bermai Inc.',
+   0x000E4D: u'Numesa Inc.',
+   0x000E4E: u'Waveplus Technology Co., Ltd.',
+   0x000E4F: u'Trajet GmbH',
+   0x000E50: u'Thomson Telecom Belgium',
+   0x000E51: u'tecna elettronica srl',
+   0x000E52: u'Optium Corporation',
+   0x000E53: u'AV TECH CORPORATION',
+   0x000E54: u'AlphaCell Wireless Ltd.',
+   0x000E55: u'AUVITRAN',
+   0x000E56: u'4G Systems GmbH',
+   0x000E57: u'Iworld Networking, Inc.',
+   0x000E58: u'Sonos, Inc.',
+   0x000E59: u'SAGEM SA',
+   0x000E5A: u'TELEFIELD inc.',
+   0x000E5B: u'ParkerVision - Direct2Data',
+   0x000E5C: u'Motorola BCS',
+   0x000E5D: u'Triple Play Technologies A/S',
+   0x000E5E: u'Beijing Raisecom Science & Technology Development Co.,Ltd',
+   0x000E5F: u'activ-net GmbH & Co. KG',
+   0x000E60: u'360SUN Digital Broadband Corporation',
+   0x000E61: u'MICROTROL LIMITED',
+   0x000E62: u'Nortel Networks',
+   0x000E63: u'Lemke Diagnostics GmbH',
+   0x000E64: u'Elphel, Inc',
+   0x000E65: u'TransCore',
+   0x000E66: u'Hitachi Advanced Digital, Inc.',
+   0x000E67: u'Eltis Microelectronics Ltd.',
+   0x000E68: u'E-TOP Network Technology Inc.',
+   0x000E69: u'China Electric Power Research Institute',
+   0x000E6A: u'3COM EUROPE LTD',
+   0x000E6B: u'Janitza electronics GmbH',
+   0x000E6C: u'Device Drivers Limited',
+   0x000E6D: u'Murata Manufacturing Co., Ltd.',
+   0x000E6E: u'MICRELEC  ELECTRONICS S.A',
+   0x000E6F: u'IRIS Corporation Berhad',
+   0x000E70: u'in2 Networks',
+   0x000E71: u'Gemstar Technology Development Ltd.',
+   0x000E72: u'CTS electronics',
+   0x000E73: u'Tpack A/S',
+   0x000E74: u'Solar Telecom. Tech',
+   0x000E75: u'New York Air Brake Corp.',
+   0x000E76: u'GEMSOC INNOVISION INC.',
+   0x000E77: u'Decru, Inc.',
+   0x000E78: u'Amtelco',
+   0x000E79: u'Ample Communications Inc.',
+   0x000E7A: u'GemWon Communications Co., Ltd.',
+   0x000E7B: u'Toshiba',
+   0x000E7C: u'Televes S.A.',
+   0x000E7D: u'Electronics Line 3000 Ltd.',
+   0x000E7E: u'Comprog Oy',
+   0x000E7F: u'Hewlett Packard',
+   0x000E80: u'Thomson Technology Inc',
+   0x000E81: u'Devicescape Software, Inc.',
+   0x000E82: u'Commtech Wireless',
+   0x000E83: u'Cisco Systems',
+   0x000E84: u'Cisco Systems',
+   0x000E85: u'Catalyst Enterprises, Inc.',
+   0x000E86: u'Alcatel North America',
+   0x000E87: u'adp Gauselmann GmbH',
+   0x000E88: u'VIDEOTRON CORP.',
+   0x000E89: u'CLEMATIC',
+   0x000E8A: u'Avara Technologies Pty. Ltd.',
+   0x000E8B: u'Astarte Technology Co, Ltd.',
+   0x000E8C: u'Siemens AG A&D ET',
+   0x000E8D: u'Systems in Progress Holding GmbH',
+   0x000E8E: u'SparkLAN Communications, Inc.',
+   0x000E8F: u'Sercomm Corp.',
+   0x000E90: u'PONICO CORP.',
+   0x000E91: u'Northstar Technologies',
+   0x000E92: u'Millinet Co., Ltd.',
+   0x000E93: u'Milénio 3 Sistemas Electrónicos, Lda.',
+   0x000E94: u'Maas International BV',
+   0x000E95: u'Fujiya Denki Seisakusho Co.,Ltd.',
+   0x000E96: u'Cubic Defense Applications, Inc.',
+   0x000E97: u'Ultracker Technology CO., Inc',
+   0x000E98: u'Vitec CC, INC.',
+   0x000E99: u'Spectrum Digital, Inc',
+   0x000E9A: u'BOE TECHNOLOGY GROUP CO.,LTD',
+   0x000E9B: u'Ambit Microsystems Corporation',
+   0x000E9C: u'Pemstar',
+   0x000E9D: u'Video Networks Ltd',
+   0x000E9E: u'Topfield Co., Ltd',
+   0x000E9F: u'TEMIC SDS GmbH',
+   0x000EA0: u'NetKlass Technology Inc.',
+   0x000EA1: u'Formosa Teletek Corporation',
+   0x000EA2: u'CyberGuard Corporation',
+   0x000EA3: u'CNCR-IT CO.,LTD,HangZhou P.R.CHINA',
+   0x000EA4: u'Certance Inc.',
+   0x000EA5: u'BLIP Systems',
+   0x000EA6: u'ASUSTEK COMPUTER INC.',
+   0x000EA7: u'Endace Inc Ltd.',
+   0x000EA8: u'United Technologists Europe Limited',
+   0x000EA9: u'Shanghai Xun Shi Communications Equipment Ltd. Co.',
+   0x000EAA: u'Scalent Systems, Inc.',
+   0x000EAB: u'OctigaBay Systems Corporation',
+   0x000EAC: u'MINTRON ENTERPRISE CO., LTD.',
+   0x000EAD: u'Metanoia Technologies, Inc.',
+   0x000EAE: u'GAWELL TECHNOLOGIES CORP.',
+   0x000EAF: u'CASTEL',
+   0x000EB0: u'Solutions Radio BV',
+   0x000EB1: u'Newcotech,Ltd',
+   0x000EB2: u'Micro-Research Finland Oy',
+   0x000EB3: u'LeftHand Networks',
+   0x000EB4: u'GUANGZHOU GAOKE COMMUNICATIONS TECHNOLOGY CO.LTD.',
+   0x000EB5: u'Ecastle Electronics Co., Ltd.',
+   0x000EB6: u'Riverbed Technology, Inc.',
+   0x000EB7: u'Knovative, Inc.',
+   0x000EB8: u'Iiga co.,Ltd',
+   0x000EB9: u'HASHIMOTO Electronics Industry Co.,Ltd.',
+   0x000EBA: u'HANMI SEMICONDUCTOR CO., LTD.',
+   0x000EBB: u'Everbee Networks',
+   0x000EBC: u'Cullmann GmbH',
+   0x000EBD: u'Burdick, a Quinton Compny',
+   0x000EBE: u'B&B Electronics Manufacturing Co.',
+   0x000EBF: u'Remsdaq Limited',
+   0x000EC0: u'Nortel Networks',
+   0x000EC1: u'MYNAH Technologies',
+   0x000EC2: u'Lowrance Electronics, Inc.',
+   0x000EC3: u'Logic Controls, Inc.',
+   0x000EC4: u'Iskra Transmission d.d.',
+   0x000EC5: u'Digital Multitools Inc',
+   0x000EC6: u'ASIX ELECTRONICS CORP.',
+   0x000EC7: u'Motorola Korea',
+   0x000EC8: u'Zoran Corporation',
+   0x000EC9: u'YOKO Technology Corp.',
+   0x000ECA: u'WTSS Inc',
+   0x000ECB: u'VineSys Technology',
+   0x000ECC: u'Tableau',
+   0x000ECD: u'SKOV A/S',
+   0x000ECE: u'S.I.T.T.I. S.p.A.',
+   0x000ECF: u'PROFIBUS Nutzerorganisation e.V.',
+   0x000ED0: u'Privaris, Inc.',
+   0x000ED1: u'Osaka Micro Computer.',
+   0x000ED2: u'Filtronic plc',
+   0x000ED3: u'Epicenter, Inc.',
+   0x000ED4: u'CRESITT INDUSTRIE',
+   0x000ED5: u'COPAN Systems Inc.',
+   0x000ED6: u'Cisco Systems',
+   0x000ED7: u'Cisco Systems',
+   0x000ED8: u'Aktino, Inc.',
+   0x000ED9: u'Aksys, Ltd.',
+   0x000EDA: u'C-TECH UNITED CORP.',
+   0x000EDB: u'XiNCOM Corp.',
+   0x000EDC: u'Tellion INC.',
+   0x000EDD: u'SHURE INCORPORATED',
+   0x000EDE: u'REMEC, Inc.',
+   0x000EDF: u'PLX Technology',
+   0x000EE0: u'Mcharge',
+   0x000EE1: u'ExtremeSpeed Inc.',
+   0x000EE2: u'Custom Engineering S.p.A.',
+   0x000EE3: u'Chiyu Technology Co.,Ltd',
+   0x000EE4: u'BOE TECHNOLOGY GROUP CO.,LTD',
+   0x000EE5: u'bitWallet, Inc.',
+   0x000EE6: u'Adimos Systems LTD',
+   0x000EE7: u'AAC ELECTRONICS CORP.',
+   0x000EE8: u'zioncom',
+   0x000EE9: u'WayTech Development, Inc.',
+   0x000EEA: u'Shadong Luneng Jicheng Electronics,Co.,Ltd',
+   0x000EEB: u'Sandmartin(zhong shan)Electronics Co.,Ltd',
+   0x000EEC: u'Orban',
+   0x000EED: u'Nokia Danmark A/S',
+   0x000EEE: u'Muco Industrie BV',
+   0x000EEF: u'PRIVATE',
+   0x000EF0: u'Festo AG & Co. KG',
+   0x000EF1: u'EZQUEST INC.',
+   0x000EF2: u'Infinico Corporation',
+   0x000EF3: u'Smarthome',
+   0x000EF4: u'Shenzhen Kasda Digital Technology Co.,Ltd',
+   0x000EF5: u'iPAC Technology Co., Ltd.',
+   0x000EF6: u'E-TEN Information Systems Co., Ltd.',
+   0x000EF7: u'Vulcan Portals Inc',
+   0x000EF8: u'SBC ASI',
+   0x000EF9: u'REA Elektronik GmbH',
+   0x000EFA: u'Optoway Technology Incorporation',
+   0x000EFB: u'Macey Enterprises',
+   0x000EFC: u'JTAG Technologies B.V.',
+   0x000EFD: u'FUJI PHOTO OPTICAL CO., LTD.',
+   0x000EFE: u'EndRun Technologies LLC',
+   0x000EFF: u'Megasolution,Inc.',
+   0x000F00: u'Legra Systems, Inc.',
+   0x000F01: u'DIGITALKS INC',
+   0x000F02: u'Digicube Technology Co., Ltd',
+   0x000F03: u'COM&C CO., LTD',
+   0x000F04: u'cim-usa inc',
+   0x000F05: u'3B SYSTEM INC.',
+   0x000F06: u'Nortel Networks',
+   0x000F07: u'Mangrove Systems, Inc.',
+   0x000F08: u'Indagon Oy',
+   0x000F09: u'PRIVATE',
+   0x000F0A: u'Clear Edge Networks',
+   0x000F0B: u'Kentima Technologies AB',
+   0x000F0C: u'SYNCHRONIC ENGINEERING',
+   0x000F0D: u'Hunt Electronic Co., Ltd.',
+   0x000F0E: u'WaveSplitter Technologies, Inc.',
+   0x000F0F: u'Real ID Technology Co., Ltd.',
+   0x000F10: u'RDM Corporation',
+   0x000F11: u'Prodrive B.V.',
+   0x000F12: u'Panasonic AVC Networks Germany GmbH',
+   0x000F13: u'Nisca corporation',
+   0x000F14: u'Mindray Co., Ltd.',
+   0x000F15: u'Kjaerulff1 A/S',
+   0x000F16: u'JAY HOW TECHNOLOGY CO.,',
+   0x000F17: u'Insta Elektro GmbH',
+   0x000F18: u'Industrial Control Systems',
+   0x000F19: u'Guidant Corporation',
+   0x000F1A: u'Gaming Support B.V.',
+   0x000F1B: u'Ego Systems Inc.',
+   0x000F1C: u'DigitAll World Co., Ltd',
+   0x000F1D: u'Cosmo Techs Co., Ltd.',
+   0x000F1E: u'Chengdu KT Electric Co.of High & New Technology',
+   0x000F1F: u'WW PCBA Test',
+   0x000F20: u'Hewlett Packard',
+   0x000F21: u'Scientific Atlanta, Inc',
+   0x000F22: u'Helius, Inc.',
+   0x000F23: u'Cisco Systems',
+   0x000F24: u'Cisco Systems',
+   0x000F25: u'AimValley B.V.',
+   0x000F26: u'WorldAccxx  LLC',
+   0x000F27: u'TEAL Electronics, Inc.',
+   0x000F28: u'Itronix Corporation',
+   0x000F29: u'Augmentix Corporation',
+   0x000F2A: u'Cableware Electronics',
+   0x000F2B: u'GREENBELL SYSTEMS',
+   0x000F2C: u'Uplogix, Inc.',
+   0x000F2D: u'CHUNG-HSIN ELECTRIC & MACHINERY MFG.CORP.',
+   0x000F2E: u'Megapower International Corp.',
+   0x000F2F: u'W-LINX TECHNOLOGY CO., LTD.',
+   0x000F30: u'Raza Microelectronics Inc',
+   0x000F31: u'Prosilica',
+   0x000F32: u'LuTong Electronic Technology Co.,Ltd',
+   0x000F33: u'DUALi Inc.',
+   0x000F34: u'Cisco Systems',
+   0x000F35: u'Cisco Systems',
+   0x000F36: u'Accurate Techhnologies, Inc.',
+   0x000F37: u'Xambala Incorporated',
+   0x000F38: u'Netstar',
+   0x000F39: u'IRIS SENSORS',
+   0x000F3A: u'HISHARP',
+   0x000F3B: u'Fuji System Machines Co., Ltd.',
+   0x000F3C: u'Endeleo Limited',
+   0x000F3D: u'D-Link Corporation',
+   0x000F3E: u'CardioNet, Inc',
+   0x000F3F: u'Big Bear Networks',
+   0x000F40: u'Optical Internetworking Forum',
+   0x000F41: u'Zipher Ltd',
+   0x000F42: u'Xalyo Systems',
+   0x000F43: u'Wasabi Systems Inc.',
+   0x000F44: u'Tivella Inc.',
+   0x000F45: u'Stretch, Inc.',
+   0x000F46: u'SINAR AG',
+   0x000F47: u'ROBOX SPA',
+   0x000F48: u'Polypix Inc.',
+   0x000F49: u'Northover Solutions Limited',
+   0x000F4A: u'Kyushu-kyohan co.,ltd',
+   0x000F4B: u'Katana Technology',
+   0x000F4C: u'Elextech INC',
+   0x000F4D: u'Centrepoint Technologies Inc.',
+   0x000F4E: u'Cellink',
+   0x000F4F: u'Cadmus Technology Ltd',
+   0x000F50: u'Baxall Limited',
+   0x000F51: u'Azul Systems, Inc.',
+   0x000F52: u'YORK Refrigeration, Marine & Controls',
+   0x000F53: u'Solarflare Communications Inc',
+   0x000F54: u'Entrelogic Corporation',
+   0x000F55: u'Datawire Communication Networks Inc.',
+   0x000F56: u'Continuum Photonics Inc',
+   0x000F57: u'CABLELOGIC Co., Ltd.',
+   0x000F58: u'Adder Technology Limited',
+   0x000F59: u'Phonak Communications AG',
+   0x000F5A: u'Peribit Networks',
+   0x000F5B: u'Delta Information Systems, Inc.',
+   0x000F5C: u'Day One Digital Media Limited',
+   0x000F5D: u'42Networks AB',
+   0x000F5E: u'Veo',
+   0x000F5F: u'Nicety Technologies Inc. (NTS)',
+   0x000F60: u'Lifetron Co.,Ltd',
+   0x000F61: u'Kiwi Networks',
+   0x000F62: u'Alcatel Bell Space N.V.',
+   0x000F63: u'Obzerv Technologies',
+   0x000F64: u'D&R Electronica Weesp BV',
+   0x000F65: u'icube Corp.',
+   0x000F66: u'Cisco-Linksys',
+   0x000F67: u'West Instruments',
+   0x000F68: u'Vavic Network Technology, Inc.',
+   0x000F69: u'SEW Eurodrive GmbH & Co. KG',
+   0x000F6A: u'Nortel Networks',
+   0x000F6B: u'GateWare Communications GmbH',
+   0x000F6C: u'ADDI-DATA GmbH',
+   0x000F6D: u'Midas Engineering',
+   0x000F6E: u'BBox',
+   0x000F6F: u'FTA Communication Technologies',
+   0x000F70: u'Wintec Industries, inc.',
+   0x000F71: u'Sanmei Electronics Co.,Ltd',
+   0x000F72: u'Sandburst',
+   0x000F73: u'Rockwell Samsung Automation',
+   0x000F74: u'Qamcom Technology AB',
+   0x000F75: u'First Silicon Solutions',
+   0x000F76: u'Digital Keystone, Inc.',
+   0x000F77: u'DENTUM CO.,LTD',
+   0x000F78: u'Datacap Systems Inc',
+   0x000F79: u'Bluetooth Interest Group Inc.',
+   0x000F7A: u'BeiJing NuQX Technology CO.,LTD',
+   0x000F7B: u'Arce Sistemas, S.A.',
+   0x000F7C: u'ACTi Corporation',
+   0x000F7D: u'Xirrus',
+   0x000F7E: u'Ablerex Electronics Co., LTD',
+   0x000F7F: u'UBSTORAGE Co.,Ltd.',
+   0x000F80: u'Trinity Security Systems,Inc.',
+   0x000F81: u'Secure Info Imaging',
+   0x000F82: u'Mortara Instrument, Inc.',
+   0x000F83: u'Brainium Technologies Inc.',
+   0x000F84: u'Astute Networks, Inc.',
+   0x000F85: u'ADDO-Japan Corporation',
+   0x000F86: u'Research In Motion Limited',
+   0x000F87: u'Maxcess International',
+   0x000F88: u'AMETEK, Inc.',
+   0x000F89: u'Winnertec System Co., Ltd.',
+   0x000F8A: u'WideView',
+   0x000F8B: u'Orion MultiSystems Inc',
+   0x000F8C: u'Gigawavetech Pte Ltd',
+   0x000F8D: u'FAST TV-Server AG',
+   0x000F8E: u'DONGYANG TELECOM CO.,LTD.',
+   0x000F8F: u'Cisco Systems',
+   0x000F90: u'Cisco Systems',
+   0x000F91: u'Aerotelecom Co.,Ltd.',
+   0x000F92: u'Microhard Systems Inc.',
+   0x000F93: u'Landis+Gyr Ltd.',
+   0x000F94: u'Genexis',
+   0x000F95: u'ELECOM Co.,LTD Laneed Division',
+   0x000F96: u'Critical Telecom Corp.',
+   0x000F97: u'Avanex Corporation',
+   0x000F98: u'Avamax Co. Ltd.',
+   0x000F99: u'APAC opto Electronics Inc.',
+   0x000F9A: u'Synchrony, Inc.',
+   0x000F9B: u'Ross Video Limited',
+   0x000F9C: u'Panduit Corp',
+   0x000F9D: u'Newnham Research Ltd',
+   0x000F9E: u'Murrelektronik GmbH',
+   0x000F9F: u'Motorola BCS',
+   0x000FA0: u'CANON KOREA BUSINESS SOLUTIONS INC.',
+   0x000FA1: u'Gigabit Systems Inc.',
+   0x000FA2: u'Digital Path Networks',
+   0x000FA3: u'Alpha Networks Inc.',
+   0x000FA4: u'Sprecher Automation GmbH',
+   0x000FA5: u'SMP / BWA Technology GmbH',
+   0x000FA6: u'S2 Security Corporation',
+   0x000FA7: u'Raptor Networks Technology',
+   0x000FA8: u'Photometrics, Inc.',
+   0x000FA9: u'PC Fabrik',
+   0x000FAA: u'Nexus Technologies',
+   0x000FAB: u'Kyushu Electronics Systems Inc.',
+   0x000FAC: u'IEEE 802.11',
+   0x000FAD: u'FMN communications GmbH',
+   0x000FAE: u'E2O Communications',
+   0x000FAF: u'Dialog Inc.',
+   0x000FB0: u'Compal Electronics,INC.',
+   0x000FB1: u'Cognio Inc.',
+   0x000FB2: u'Broadband Pacenet (India) Pvt. Ltd.',
+   0x000FB3: u'Actiontec Electronics, Inc',
+   0x000FB4: u'Timespace Technology',
+   0x000FB5: u'NETGEAR Inc',
+   0x000FB6: u'Europlex Technologies',
+   0x000FB7: u'Cavium Networks',
+   0x000FB8: u'CallURL Inc.',
+   0x000FB9: u'Adaptive Instruments',
+   0x000FBA: u'Tevebox AB',
+   0x000FBB: u'Siemens Networks GmbH & Co. KG',
+   0x000FBC: u'Onkey Technologies, Inc.',
+   0x000FBD: u'MRV Communications (Networks) LTD',
+   0x000FBE: u'e-w/you Inc.',
+   0x000FBF: u'DGT Sp. z o.o.',
+   0x000FC0: u'DELCOMp',
+   0x000FC1: u'WAVE Corporation',
+   0x000FC2: u'Uniwell Corporation',
+   0x000FC3: u'PalmPalm Technology, Inc.',
+   0x000FC4: u'NST co.,LTD.',
+   0x000FC5: u'KeyMed Ltd',
+   0x000FC6: u'Eurocom Industries A/S',
+   0x000FC7: u'Dionica R&D Ltd.',
+   0x000FC8: u'Chantry Networks',
+   0x000FC9: u'Allnet GmbH',
+   0x000FCA: u'A-JIN TECHLINE CO, LTD',
+   0x000FCB: u'3COM EUROPE LTD',
+   0x000FCC: u'Netopia, Inc.',
+   0x000FCD: u'Nortel Networks',
+   0x000FCE: u'Kikusui Electronics Corp.',
+   0x000FCF: u'Datawind Research',
+   0x000FD0: u'ASTRI',
+   0x000FD1: u'Applied Wireless Identifications Group, Inc.',
+   0x000FD2: u'EWA Technologies, Inc.',
+   0x000FD3: u'Digium',
+   0x000FD4: u'Soundcraft',
+   0x000FD5: u'Schwechat - RISE',
+   0x000FD6: u'Sarotech Co., Ltd',
+   0x000FD7: u'Harman Music Group',
+   0x000FD8: u'Force, Inc.',
+   0x000FD9: u'FlexDSL Telecommunications AG',
+   0x000FDA: u'YAZAKI CORPORATION',
+   0x000FDB: u'Westell Technologies',
+   0x000FDC: u'Ueda Japan  Radio Co., Ltd.',
+   0x000FDD: u'SORDIN AB',
+   0x000FDE: u'Sony Ericsson Mobile Communications AB',
+   0x000FDF: u'SOLOMON Technology Corp.',
+   0x000FE0: u'NComputing Co.,Ltd.',
+   0x000FE1: u'ID DIGITAL CORPORATION',
+   0x000FE2: u'Hangzhou Huawei-3Com Tech. Co., Ltd.',
+   0x000FE3: u'Damm Cellular Systems A/S',
+   0x000FE4: u'Pantech Co.,Ltd',
+   0x000FE5: u'MERCURY SECURITY CORPORATION',
+   0x000FE6: u'MBTech Systems, Inc.',
+   0x000FE7: u'Lutron Electronics Co., Inc.',
+   0x000FE8: u'Lobos, Inc.',
+   0x000FE9: u'GW TECHNOLOGIES CO.,LTD.',
+   0x000FEA: u'Giga-Byte Technology Co.,LTD.',
+   0x000FEB: u'Cylon Controls',
+   0x000FEC: u'Arkus Inc.',
+   0x000FED: u'Anam Electronics Co., Ltd',
+   0x000FEE: u'XTec, Incorporated',
+   0x000FEF: u'Thales e-Transactions GmbH',
+   0x000FF0: u'Sunray Enterprise',
+   0x000FF1: u'nex-G Systems Pte.Ltd',
+   0x000FF2: u'Loud Technologies Inc.',
+   0x000FF3: u'Jung Myoung Communications&Technology',
+   0x000FF4: u'Guntermann & Drunck GmbH',
+   0x000FF5: u'GN&S company',
+   0x000FF6: u'Darfon Electronics Corp.',
+   0x000FF7: u'Cisco Systems',
+   0x000FF8: u'Cisco  Systems',
+   0x000FF9: u'Valcretec, Inc.',
+   0x000FFA: u'Optinel Systems, Inc.',
+   0x000FFB: u'Nippon Denso Industry Co., Ltd.',
+   0x000FFC: u'Merit Li-Lin Ent.',
+   0x000FFD: u'Glorytek Network Inc.',
+   0x000FFE: u'G-PRO COMPUTER',
+   0x000FFF: u'Control4',
+   0x001000: u'CABLE TELEVISION LABORATORIES, INC.',
+   0x001001: u'MCK COMMUNICATIONS',
+   0x001002: u'ACTIA',
+   0x001003: u'IMATRON, INC.',
+   0x001004: u'THE BRANTLEY COILE COMPANY,INC',
+   0x001005: u'UEC COMMERCIAL',
+   0x001006: u'Thales Contact Solutions Ltd.',
+   0x001007: u'CISCO SYSTEMS, INC.',
+   0x001008: u'VIENNA SYSTEMS CORPORATION',
+   0x001009: u'HORO QUARTZ',
+   0x00100A: u'WILLIAMS COMMUNICATIONS GROUP',
+   0x00100B: u'CISCO SYSTEMS, INC.',
+   0x00100C: u'ITO CO., LTD.',
+   0x00100D: u'CISCO SYSTEMS, INC.',
+   0x00100E: u'MICRO LINEAR COPORATION',
+   0x00100F: u'INDUSTRIAL CPU SYSTEMS',
+   0x001010: u'INITIO CORPORATION',
+   0x001011: u'CISCO SYSTEMS, INC.',
+   0x001012: u'PROCESSOR SYSTEMS (I) PVT LTD',
+   0x001013: u'Kontron',
+   0x001014: u'CISCO SYSTEMS, INC.',
+   0x001015: u'OOmon Inc.',
+   0x001016: u'T.SQWARE',
+   0x001017: u'MICOS GmbH',
+   0x001018: u'BROADCOM CORPORATION',
+   0x001019: u'SIRONA DENTAL SYSTEMS GmbH & Co. KG',
+   0x00101A: u'PictureTel Corp.',
+   0x00101B: u'CORNET TECHNOLOGY, INC.',
+   0x00101C: u'OHM TECHNOLOGIES INTL, LLC',
+   0x00101D: u'WINBOND ELECTRONICS CORP.',
+   0x00101E: u'MATSUSHITA ELECTRONIC INSTRUMENTS CORP.',
+   0x00101F: u'CISCO SYSTEMS, INC.',
+   0x001020: u'WELCH ALLYN, DATA COLLECTION',
+   0x001021: u'ENCANTO NETWORKS, INC.',
+   0x001022: u'SatCom Media Corporation',
+   0x001023: u'FLOWWISE NETWORKS, INC.',
+   0x001024: u'NAGOYA ELECTRIC WORKS CO., LTD',
+   0x001025: u'GRAYHILL INC.',
+   0x001026: u'ACCELERATED NETWORKS, INC.',
+   0x001027: u'L-3 COMMUNICATIONS EAST',
+   0x001028: u'COMPUTER TECHNICA, INC.',
+   0x001029: u'CISCO SYSTEMS, INC.',
+   0x00102A: u'ZF MICROSYSTEMS, INC.',
+   0x00102B: u'UMAX DATA SYSTEMS, INC.',
+   0x00102C: u'Lasat Networks A/S',
+   0x00102D: u'HITACHI SOFTWARE ENGINEERING',
+   0x00102E: u'NETWORK SYSTEMS & TECHNOLOGIES PVT. LTD.',
+   0x00102F: u'CISCO SYSTEMS, INC.',
+   0x001030: u'EION Inc.',
+   0x001031: u'OBJECTIVE COMMUNICATIONS, INC.',
+   0x001032: u'ALTA TECHNOLOGY',
+   0x001033: u'ACCESSLAN COMMUNICATIONS, INC.',
+   0x001034: u'GNP Computers',
+   0x001035: u'ELITEGROUP COMPUTER SYSTEMS CO., LTD',
+   0x001036: u'INTER-TEL INTEGRATED SYSTEMS',
+   0x001037: u'CYQ\'ve Technology Co., Ltd.',
+   0x001038: u'MICRO RESEARCH INSTITUTE, INC.',
+   0x001039: u'Vectron Systems AG',
+   0x00103A: u'DIAMOND NETWORK TECH',
+   0x00103B: u'HIPPI NETWORKING FORUM',
+   0x00103C: u'IC ENSEMBLE, INC.',
+   0x00103D: u'PHASECOM, LTD.',
+   0x00103E: u'NETSCHOOLS CORPORATION',
+   0x00103F: u'TOLLGRADE COMMUNICATIONS, INC.',
+   0x001040: u'INTERMEC CORPORATION',
+   0x001041: u'BRISTOL BABCOCK, INC.',
+   0x001042: u'AlacriTech',
+   0x001043: u'A2 CORPORATION',
+   0x001044: u'InnoLabs Corporation',
+   0x001045: u'Nortel Networks',
+   0x001046: u'ALCORN MCBRIDE INC.',
+   0x001047: u'ECHO ELETRIC CO. LTD.',
+   0x001048: u'HTRC AUTOMATION, INC.',
+   0x001049: u'SHORELINE TELEWORKS, INC.',
+   0x00104A: u'THE PARVUC CORPORATION',
+   0x00104B: u'3COM CORPORATION',
+   0x00104C: u'COMPUTER ACCESS TECHNOLOGY',
+   0x00104D: u'SURTEC INDUSTRIES, INC.',
+   0x00104E: u'CEOLOGIC',
+   0x00104F: u'STORAGE TECHNOLOGY CORPORATION',
+   0x001050: u'RION CO., LTD.',
+   0x001051: u'CMICRO CORPORATION',
+   0x001052: u'METTLER-TOLEDO (ALBSTADT) GMBH',
+   0x001053: u'COMPUTER TECHNOLOGY CORP.',
+   0x001054: u'CISCO SYSTEMS, INC.',
+   0x001055: u'FUJITSU MICROELECTRONICS, INC.',
+   0x001056: u'SODICK CO., LTD.',
+   0x001057: u'Rebel.com, Inc.',
+   0x001058: u'ArrowPoint Communications',
+   0x001059: u'DIABLO RESEARCH CO. LLC',
+   0x00105A: u'3COM CORPORATION',
+   0x00105B: u'NET INSIGHT AB',
+   0x00105C: u'QUANTUM DESIGNS (H.K.) LTD.',
+   0x00105D: u'Draeger Medical',
+   0x00105E: u'HEKIMIAN LABORATORIES, INC.',
+   0x00105F: u'IN-SNEC',
+   0x001060: u'BILLIONTON SYSTEMS, INC.',
+   0x001061: u'HOSTLINK CORP.',
+   0x001062: u'NX SERVER, ILNC.',
+   0x001063: u'STARGUIDE DIGITAL NETWORKS',
+   0x001064: u'DNPG, LLC',
+   0x001065: u'RADYNE CORPORATION',
+   0x001066: u'ADVANCED CONTROL SYSTEMS, INC.',
+   0x001067: u'REDBACK NETWORKS, INC.',
+   0x001068: u'COMOS TELECOM',
+   0x001069: u'HELIOSS COMMUNICATIONS, INC.',
+   0x00106A: u'DIGITAL MICROWAVE CORPORATION',
+   0x00106B: u'SONUS NETWORKS, INC.',
+   0x00106C: u'INFRATEC PLUS GmbH',
+   0x00106D: u'Axxcelera Broadband Wireless',
+   0x00106E: u'TADIRAN COM. LTD.',
+   0x00106F: u'TRENTON TECHNOLOGY INC.',
+   0x001070: u'CARADON TREND LTD.',
+   0x001071: u'ADVANET INC.',
+   0x001072: u'GVN TECHNOLOGIES, INC.',
+   0x001073: u'TECHNOBOX, INC.',
+   0x001074: u'ATEN INTERNATIONAL CO., LTD.',
+   0x001075: u'Maxtor Corporation',
+   0x001076: u'EUREM GmbH',
+   0x001077: u'SAF DRIVE SYSTEMS, LTD.',
+   0x001078: u'NUERA COMMUNICATIONS, INC.',
+   0x001079: u'CISCO SYSTEMS, INC.',
+   0x00107A: u'AmbiCom, Inc.',
+   0x00107B: u'CISCO SYSTEMS, INC.',
+   0x00107C: u'P-COM, INC.',
+   0x00107D: u'AURORA COMMUNICATIONS, LTD.',
+   0x00107E: u'BACHMANN ELECTRONIC GmbH',
+   0x00107F: u'CRESTRON ELECTRONICS, INC.',
+   0x001080: u'METAWAVE COMMUNICATIONS',
+   0x001081: u'DPS, INC.',
+   0x001082: u'JNA TELECOMMUNICATIONS LIMITED',
+   0x001083: u'HEWLETT-PACKARD COMPANY',
+   0x001084: u'K-BOT COMMUNICATIONS',
+   0x001085: u'POLARIS COMMUNICATIONS, INC.',
+   0x001086: u'ATTO TECHNOLOGY, INC.',
+   0x001087: u'Xstreamis PLC',
+   0x001088: u'AMERICAN NETWORKS INC.',
+   0x001089: u'WebSonic',
+   0x00108A: u'TeraLogic, Inc.',
+   0x00108B: u'LASERANIMATION SOLLINGER GmbH',
+   0x00108C: u'FUJITSU TELECOMMUNICATIONS EUROPE, LTD.',
+   0x00108D: u'JOHNSON CONTROLS, INC.',
+   0x00108E: u'HUGH SYMONS CONCEPT Technologies Ltd.',
+   0x00108F: u'RAPTOR SYSTEMS',
+   0x001090: u'CIMETRICS, INC.',
+   0x001091: u'NO WIRES NEEDED BV',
+   0x001092: u'NETCORE INC.',
+   0x001093: u'CMS COMPUTERS, LTD.',
+   0x001094: u'Performance Analysis Broadband, Spirent plc',
+   0x001095: u'Thomson Inc.',
+   0x001096: u'TRACEWELL SYSTEMS, INC.',
+   0x001097: u'WinNet Metropolitan Communications Systems, Inc.',
+   0x001098: u'STARNET TECHNOLOGIES, INC.',
+   0x001099: u'InnoMedia, Inc.',
+   0x00109A: u'NETLINE',
+   0x00109B: u'Emulex Corporation',
+   0x00109C: u'M-SYSTEM CO., LTD.',
+   0x00109D: u'CLARINET SYSTEMS, INC.',
+   0x00109E: u'AWARE, INC.',
+   0x00109F: u'PAVO, INC.',
+   0x0010A0: u'INNOVEX TECHNOLOGIES, INC.',
+   0x0010A1: u'KENDIN SEMICONDUCTOR, INC.',
+   0x0010A2: u'TNS',
+   0x0010A3: u'OMNITRONIX, INC.',
+   0x0010A4: u'XIRCOM',
+   0x0010A5: u'OXFORD INSTRUMENTS',
+   0x0010A6: u'CISCO SYSTEMS, INC.',
+   0x0010A7: u'UNEX TECHNOLOGY CORPORATION',
+   0x0010A8: u'RELIANCE COMPUTER CORP.',
+   0x0010A9: u'ADHOC TECHNOLOGIES',
+   0x0010AA: u'MEDIA4, INC.',
+   0x0010AB: u'KOITO INDUSTRIES, LTD.',
+   0x0010AC: u'IMCI TECHNOLOGIES',
+   0x0010AD: u'SOFTRONICS USB, INC.',
+   0x0010AE: u'SHINKO ELECTRIC INDUSTRIES CO.',
+   0x0010AF: u'TAC SYSTEMS, INC.',
+   0x0010B0: u'MERIDIAN TECHNOLOGY CORP.',
+   0x0010B1: u'FOR-A CO., LTD.',
+   0x0010B2: u'COACTIVE AESTHETICS',
+   0x0010B3: u'NOKIA MULTIMEDIA TERMINALS',
+   0x0010B4: u'ATMOSPHERE NETWORKS',
+   0x0010B5: u'ACCTON TECHNOLOGY CORPORATION',
+   0x0010B6: u'ENTRATA COMMUNICATIONS CORP.',
+   0x0010B7: u'COYOTE TECHNOLOGIES, LLC',
+   0x0010B8: u'ISHIGAKI COMPUTER SYSTEM CO.',
+   0x0010B9: u'MAXTOR CORP.',
+   0x0010BA: u'MARTINHO-DAVIS SYSTEMS, INC.',
+   0x0010BB: u'DATA & INFORMATION TECHNOLOGY',
+   0x0010BC: u'Aastra Telecom',
+   0x0010BD: u'THE TELECOMMUNICATION TECHNOLOGY COMMITTEE',
+   0x0010BE: u'TELEXIS CORP.',
+   0x0010BF: u'InterAir Wireless',
+   0x0010C0: u'ARMA, INC.',
+   0x0010C1: u'OI ELECTRIC CO., LTD.',
+   0x0010C2: u'WILLNET, INC.',
+   0x0010C3: u'CSI-CONTROL SYSTEMS',
+   0x0010C4: u'MEDIA LINKS CO., LTD.',
+   0x0010C5: u'PROTOCOL TECHNOLOGIES, INC.',
+   0x0010C6: u'USI',
+   0x0010C7: u'DATA TRANSMISSION NETWORK',
+   0x0010C8: u'COMMUNICATIONS ELECTRONICS SECURITY GROUP',
+   0x0010C9: u'MITSUBISHI ELECTRONICS LOGISTIC SUPPORT CO.',
+   0x0010CA: u'INTEGRAL ACCESS',
+   0x0010CB: u'FACIT K.K.',
+   0x0010CC: u'CLP COMPUTER LOGISTIK PLANUNG GmbH',
+   0x0010CD: u'INTERFACE CONCEPT',
+   0x0010CE: u'VOLAMP, LTD.',
+   0x0010CF: u'FIBERLANE COMMUNICATIONS',
+   0x0010D0: u'WITCOM, LTD.',
+   0x0010D1: u'Top Layer Networks, Inc.',
+   0x0010D2: u'NITTO TSUSHINKI CO., LTD',
+   0x0010D3: u'GRIPS ELECTRONIC GMBH',
+   0x0010D4: u'STORAGE COMPUTER CORPORATION',
+   0x0010D5: u'IMASDE CANARIAS, S.A.',
+   0x0010D6: u'ITT - A/CD',
+   0x0010D7: u'ARGOSY RESEARCH INC.',
+   0x0010D8: u'CALISTA',
+   0x0010D9: u'IBM JAPAN, FUJISAWA MT+D',
+   0x0010DA: u'MOTION ENGINEERING, INC.',
+   0x0010DB: u'Juniper Networks, Inc.',
+   0x0010DC: u'MICRO-STAR INTERNATIONAL CO., LTD.',
+   0x0010DD: u'ENABLE SEMICONDUCTOR, INC.',
+   0x0010DE: u'INTERNATIONAL DATACASTING CORPORATION',
+   0x0010DF: u'RISE COMPUTER INC.',
+   0x0010E0: u'COBALT MICROSERVER, INC.',
+   0x0010E1: u'S.I. TECH, INC.',
+   0x0010E2: u'ArrayComm, Inc.',
+   0x0010E3: u'COMPAQ COMPUTER CORPORATION',
+   0x0010E4: u'NSI CORPORATION',
+   0x0010E5: u'SOLECTRON TEXAS',
+   0x0010E6: u'APPLIED INTELLIGENT SYSTEMS, INC.',
+   0x0010E7: u'BreezeCom',
+   0x0010E8: u'TELOCITY, INCORPORATED',
+   0x0010E9: u'RAIDTEC LTD.',
+   0x0010EA: u'ADEPT TECHNOLOGY',
+   0x0010EB: u'SELSIUS SYSTEMS, INC.',
+   0x0010EC: u'RPCG, LLC',
+   0x0010ED: u'SUNDANCE TECHNOLOGY, INC.',
+   0x0010EE: u'CTI PRODUCTS, INC.',
+   0x0010EF: u'DBTEL INCORPORATED',
+   0x0010F1: u'I-O CORPORATION',
+   0x0010F2: u'ANTEC',
+   0x0010F3: u'Nexcom International Co., Ltd.',
+   0x0010F4: u'VERTICAL NETWORKS, INC.',
+   0x0010F5: u'AMHERST SYSTEMS, INC.',
+   0x0010F6: u'CISCO SYSTEMS, INC.',
+   0x0010F7: u'IRIICHI TECHNOLOGIES Inc.',
+   0x0010F8: u'TEXIO CORPORATION',
+   0x0010F9: u'UNIQUE SYSTEMS, INC.',
+   0x0010FA: u'ZAYANTE, INC.',
+   0x0010FB: u'ZIDA TECHNOLOGIES LIMITED',
+   0x0010FC: u'BROADBAND NETWORKS, INC.',
+   0x0010FD: u'COCOM A/S',
+   0x0010FE: u'DIGITAL EQUIPMENT CORPORATION',
+   0x0010FF: u'CISCO SYSTEMS, INC.',
+   0x001100: u'RAM Industries, LLC',
+   0x001101: u'CET Technologies Pte Ltd',
+   0x001102: u'Aurora Multimedia Corp.',
+   0x001103: u'kawamura electric inc.',
+   0x001104: u'TELEXY',
+   0x001105: u'Sunplus Technology Co., Ltd.',
+   0x001106: u'Siemens NV (Belgium)',
+   0x001107: u'RGB Networks Inc.',
+   0x001108: u'Orbital Data Corporation',
+   0x001109: u'Micro-Star International',
+   0x00110A: u'Hewlett Packard',
+   0x00110B: u'Franklin Technology Systems',
+   0x00110C: u'Atmark Techno, Inc.',
+   0x00110D: u'SANBlaze Technology, Inc.',
+   0x00110E: u'Tsurusaki Sealand Transportation Co. Ltd.',
+   0x00110F: u'netplat,Inc.',
+   0x001110: u'Maxanna Technology Co., Ltd.',
+   0x001111: u'Intel Corporation',
+   0x001112: u'Honeywell CMSS',
+   0x001113: u'Fraunhofer FOKUS',
+   0x001114: u'EverFocus Electronics Corp.',
+   0x001115: u'EPIN Technologies, Inc.',
+   0x001116: u'COTEAU VERT CO., LTD.',
+   0x001117: u'CESNET',
+   0x001118: u'BLX IC Design Corp., Ltd.',
+   0x001119: u'Solteras, Inc.',
+   0x00111A: u'Motorola BCS',
+   0x00111B: u'Targa Systems Div L-3 Communications Canada',
+   0x00111C: u'Pleora Technologies Inc.',
+   0x00111D: u'Hectrix Limited',
+   0x00111E: u'EPSG (Ethernet Powerlink Standardization Group)',
+   0x00111F: u'Doremi Labs, Inc.',
+   0x001120: u'Cisco Systems',
+   0x001121: u'Cisco Systems',
+   0x001122: u'CIMSYS Inc',
+   0x001123: u'Appointech, Inc.',
+   0x001124: u'Apple Computer',
+   0x001125: u'IBM Corporation',
+   0x001126: u'Venstar Inc.',
+   0x001127: u'TASI, Inc',
+   0x001128: u'Streamit',
+   0x001129: u'Paradise Datacom Ltd.',
+   0x00112A: u'Niko NV',
+   0x00112B: u'NetModule',
+   0x00112C: u'IZT GmbH',
+   0x00112D: u'Guys Without Ties',
+   0x00112E: u'CEICOM',
+   0x00112F: u'ASUSTek Computer Inc.',
+   0x001130: u'Allied Telesis (Hong Kong) Ltd.',
+   0x001131: u'UNATECH. CO.,LTD',
+   0x001132: u'Synology Incorporated',
+   0x001133: u'Siemens Austria SIMEA',
+   0x001134: u'MediaCell, Inc.',
+   0x001135: u'Grandeye Ltd',
+   0x001136: u'Goodrich Sensor Systems',
+   0x001137: u'AICHI ELECTRIC CO., LTD.',
+   0x001138: u'TAISHIN CO., LTD.',
+   0x001139: u'STOEBER ANTRIEBSTECHNIK GmbH + Co. KG.',
+   0x00113A: u'SHINBORAM',
+   0x00113B: u'Micronet Communications Inc.',
+   0x00113C: u'Micronas GmbH',
+   0x00113D: u'KN SOLTEC CO.,LTD.',
+   0x00113E: u'JL Corporation',
+   0x00113F: u'Alcatel DI',
+   0x001140: u'Nanometrics Inc.',
+   0x001141: u'GoodMan Corporation',
+   0x001142: u'e-SMARTCOM  INC.',
+   0x001143: u'DELL INC.',
+   0x001144: u'Assurance Technology Corp',
+   0x001145: u'ValuePoint Networks',
+   0x001146: u'Telecard-Pribor Ltd',
+   0x001147: u'Secom-Industry co.LTD.',
+   0x001148: u'Prolon Control Systems',
+   0x001149: u'Proliphix LLC',
+   0x00114A: u'KAYABA INDUSTRY Co,.Ltd.',
+   0x00114B: u'Francotyp-Postalia AG & Co. KG',
+   0x00114C: u'caffeina applied research ltd.',
+   0x00114D: u'Atsumi Electric Co.,LTD.',
+   0x00114E: u'690885 Ontario Inc.',
+   0x00114F: u'US Digital Television, Inc',
+   0x001150: u'Belkin Corporation',
+   0x001151: u'Mykotronx',
+   0x001152: u'Eidsvoll Electronics AS',
+   0x001153: u'Trident Tek, Inc.',
+   0x001154: u'Webpro Technologies Inc.',
+   0x001155: u'Sevis Systems',
+   0x001156: u'Pharos Systems NZ',
+   0x001157: u'OF Networks Co., Ltd.',
+   0x001158: u'Nortel Networks',
+   0x001159: u'MATISSE NETWORKS INC',
+   0x00115A: u'Ivoclar Vivadent AG',
+   0x00115B: u'Elitegroup Computer System Co. (ECS)',
+   0x00115C: u'Cisco',
+   0x00115D: u'Cisco',
+   0x00115E: u'ProMinent Dosiertechnik GmbH',
+   0x00115F: u'Intellix Co., Ltd.',
+   0x001160: u'ARTDIO Company Co., LTD',
+   0x001161: u'NetStreams, LLC',
+   0x001162: u'STAR MICRONICS CO.,LTD.',
+   0x001163: u'SYSTEM SPA DEPT. ELECTRONICS',
+   0x001164: u'ACARD Technology Corp.',
+   0x001165: u'Znyx Networks',
+   0x001166: u'Taelim Electronics Co., Ltd.',
+   0x001167: u'Integrated System Solution Corp.',
+   0x001168: u'HomeLogic LLC',
+   0x001169: u'EMS Satcom',
+   0x00116A: u'Domo Ltd',
+   0x00116B: u'Digital Data Communications Asia Co.,Ltd',
+   0x00116C: u'Nanwang Multimedia Inc.,Ltd',
+   0x00116D: u'American Time and Signal',
+   0x00116E: u'PePLink Ltd.',
+   0x00116F: u'Netforyou Co., LTD.',
+   0x001170: u'GSC SRL',
+   0x001171: u'DEXTER Communications, Inc.',
+   0x001172: u'COTRON CORPORATION',
+   0x001173: u'Adtron Corporation',
+   0x001174: u'Wibhu Technologies, Inc.',
+   0x001175: u'PathScale, Inc.',
+   0x001176: u'Intellambda Systems, Inc.',
+   0x001177: u'COAXIAL NETWORKS, INC.',
+   0x001178: u'Chiron Technology Ltd',
+   0x001179: u'Singular Technology Co. Ltd.',
+   0x00117A: u'Singim International Corp.',
+   0x00117B: u'Büchi Labortechnik AG',
+   0x00117C: u'e-zy.net',
+   0x00117D: u'ZMD America, Inc.',
+   0x00117E: u'Progeny Inc.',
+   0x00117F: u'Neotune Information Technology Corporation,.LTD',
+   0x001180: u'Motorola BCS',
+   0x001181: u'InterEnergy Co.Ltd,',
+   0x001182: u'IMI Norgren Ltd',
+   0x001183: u'PSC Scanning, Inc',
+   0x001184: u'Humo Laboratory,Ltd.',
+   0x001185: u'Hewlett Packard',
+   0x001186: u'Prime Systems, Inc.',
+   0x001187: u'Category Solutions, Inc',
+   0x001188: u'Enterasys',
+   0x001189: u'Aerotech Inc',
+   0x00118A: u'Viewtran Technology Limited',
+   0x00118B: u'NetDevices Inc.',
+   0x00118C: u'Missouri Department of Transportation',
+   0x00118D: u'Hanchang System Corp.',
+   0x00118E: u'Halytech Mace',
+   0x00118F: u'EUTECH INSTRUMENTS PTE. LTD.',
+   0x001190: u'Digital Design Corporation',
+   0x001191: u'CTS-Clima Temperatur Systeme GmbH',
+   0x001192: u'Cisco Systems',
+   0x001193: u'Cisco Systems',
+   0x001194: u'Chi Mei Communication Systems, Inc.',
+   0x001195: u'D-Link Corporation',
+   0x001196: u'Actuality Systems, Inc.',
+   0x001197: u'Monitoring Technologies Limited',
+   0x001198: u'Prism Media Products Limited',
+   0x001199: u'2wcom GmbH',
+   0x00119A: u'Alkeria srl',
+   0x00119B: u'Telesynergy Research Inc.',
+   0x00119C: u'EP&T Energy',
+   0x00119D: u'Diginfo Technology Corporation',
+   0x00119E: u'Solectron Brazil',
+   0x00119F: u'Nokia Danmark A/S',
+   0x0011A0: u'Vtech Engineering Canada Ltd',
+   0x0011A1: u'VISION NETWARE CO.,LTD',
+   0x0011A2: u'Manufacturing Technology Inc',
+   0x0011A3: u'LanReady Technologies Inc.',
+   0x0011A4: u'JStream Technologies Inc.',
+   0x0011A5: u'Fortuna Electronic Corp.',
+   0x0011A6: u'Sypixx Networks',
+   0x0011A7: u'Infilco Degremont Inc.',
+   0x0011A8: u'Quest Technologies',
+   0x0011A9: u'MOIMSTONE Co., LTD',
+   0x0011AA: u'Uniclass Technology, Co., LTD',
+   0x0011AB: u'TRUSTABLE TECHNOLOGY CO.,LTD.',
+   0x0011AC: u'Simtec Electronics',
+   0x0011AD: u'Shanghai Ruijie Technology',
+   0x0011AE: u'Motorola BCS',
+   0x0011AF: u'Medialink-i,Inc',
+   0x0011B0: u'Fortelink Inc.',
+   0x0011B1: u'BlueExpert Technology Corp.',
+   0x0011B2: u'2001 Technology Inc.',
+   0x0011B3: u'YOSHIMIYA CO.,LTD.',
+   0x0011B4: u'Westermo Teleindustri AB',
+   0x0011B5: u'Shenzhen Powercom Co.,Ltd',
+   0x0011B6: u'Open Systems International',
+   0x0011B7: u'Melexis Nederland B.V.',
+   0x0011B8: u'Liebherr - Elektronik GmbH',
+   0x0011B9: u'Inner Range Pty. Ltd.',
+   0x0011BA: u'Elexol Pty Ltd',
+   0x0011BB: u'Cisco Systems',
+   0x0011BC: u'Cisco Systems',
+   0x0011BD: u'Bombardier Transportation',
+   0x0011BE: u'AGP Telecom Co. Ltd',
+   0x0011BF: u'AESYS S.p.A.',
+   0x0011C0: u'Aday Technology Inc',
+   0x0011C1: u'4P MOBILE DATA PROCESSING',
+   0x0011C2: u'United Fiber Optic Communication',
+   0x0011C3: u'Transceiving System Technology Corporation',
+   0x0011C4: u'Terminales de Telecomunicacion Terrestre, S.L.',
+   0x0011C5: u'TEN Technology',
+   0x0011C6: u'Seagate Technology LLC',
+   0x0011C7: u'RAYMARINE Group Ltd.',
+   0x0011C8: u'Powercom Co., Ltd.',
+   0x0011C9: u'MTT Corporation',
+   0x0011CA: u'Long Range Systems, Inc.',
+   0x0011CB: u'Jacobsons RKH AB',
+   0x0011CC: u'Guangzhou Jinpeng Group Co.,Ltd.',
+   0x0011CD: u'Axsun Technologies',
+   0x0011CE: u'Ubisense Limited',
+   0x0011CF: u'Thrane & Thrane A/S',
+   0x0011D0: u'Tandberg Data ASA',
+   0x0011D1: u'Soft Imaging System GmbH',
+   0x0011D2: u'Perception Digital Ltd',
+   0x0011D3: u'NextGenTel Holding ASA',
+   0x0011D4: u'NetEnrich, Inc',
+   0x0011D5: u'Hangzhou Sunyard System Engineering Co.,Ltd.',
+   0x0011D6: u'HandEra, Inc.',
+   0x0011D7: u'eWerks Inc',
+   0x0011D8: u'ASUSTek Computer Inc.',
+   0x0011D9: u'TiVo',
+   0x0011DA: u'Vivaas Technology Inc.',
+   0x0011DB: u'Land-Cellular Corporation',
+   0x0011DC: u'Glunz & Jensen',
+   0x0011DD: u'FROMUS TEC. Co., Ltd.',
+   0x0011DE: u'EURILOGIC',
+   0x0011DF: u'Arecont Systems',
+   0x0011E0: u'U-MEDIA Communications, Inc.',
+   0x0011E1: u'BEKO Electronics Co.',
+   0x0011E2: u'Hua Jung Components Co., Ltd.',
+   0x0011E3: u'Thomson, Inc.',
+   0x0011E4: u'Danelec Electronics A/S',
+   0x0011E5: u'KCodes Corporation',
+   0x0011E6: u'Scientific Atlanta',
+   0x0011E7: u'WORLDSAT - Texas de France',
+   0x0011E8: u'Tixi.Com',
+   0x0011E9: u'STARNEX CO., LTD.',
+   0x0011EA: u'IWICS Inc.',
+   0x0011EB: u'Innovative Integration',
+   0x0011EC: u'AVIX INC.',
+   0x0011ED: u'802 Global',
+   0x0011EE: u'Estari, Inc.',
+   0x0011EF: u'Conitec Datensysteme GmbH',
+   0x0011F0: u'Wideful Limited',
+   0x0011F1: u'QinetiQ Ltd',
+   0x0011F2: u'Institute of Network Technologies',
+   0x0011F3: u'Gavitec AG- mobile digit',
+   0x0011F4: u'woori-net',
+   0x0011F5: u'ASKEY COMPUTER CORP.',
+   0x0011F6: u'Asia Pacific Microsystems , Inc.',
+   0x0011F7: u'Shenzhen Forward Industry Co., Ltd',
+   0x0011F8: u'AIRAYA Corp',
+   0x0011F9: u'Nortel Networks',
+   0x0011FA: u'Rane Corporation',
+   0x0011FB: u'Heidelberg Engineering GmbH',
+   0x0011FC: u'HARTING Electric Gmbh & Co.KG',
+   0x0011FD: u'KORG INC.',
+   0x0011FE: u'Keiyo System Research, Inc.',
+   0x0011FF: u'Digitro Tecnologia Ltda',
+   0x001200: u'Cisco',
+   0x001201: u'Cisco',
+   0x001202: u'Audio International Inc.',
+   0x001203: u'Activ Networks',
+   0x001204: u'u10 Networks, Inc.',
+   0x001205: u'Terrasat Communications, Inc.',
+   0x001206: u'iQuest (NZ) Ltd',
+   0x001207: u'Head Strong International Limited',
+   0x001208: u'Gantner Electronic GmbH',
+   0x001209: u'Fastrax Ltd',
+   0x00120A: u'Emerson Electric GmbH & Co. OHG',
+   0x00120B: u'Chinasys Technologies Limited',
+   0x00120C: u'CE-Infosys Pte Ltd',
+   0x00120D: u'Advanced Telecommunication Technologies, Inc.',
+   0x00120E: u'AboCom',
+   0x00120F: u'IEEE 802.3',
+   0x001210: u'WideRay Corp',
+   0x001211: u'Protechna Herbst GmbH & Co. KG',
+   0x001212: u'PLUS Vision Corporation',
+   0x001213: u'Metrohm AG',
+   0x001214: u'Koenig & Bauer AG',
+   0x001215: u'iStor Networks, Inc.',
+   0x001216: u'ICP Internet Communication Payment AG',
+   0x001217: u'Cisco-Linksys, LLC',
+   0x001218: u'ARUZE Corporation',
+   0x001219: u'Ahead Communication Systems Inc',
+   0x00121A: u'Techno Soft Systemnics Inc.',
+   0x00121B: u'Sound Devices, LLC',
+   0x00121C: u'PARROT S.A.',
+   0x00121D: u'Netfabric Corporation',
+   0x00121E: u'Juniper Networks, Inc.',
+   0x00121F: u'Harding Intruments',
+   0x001220: u'Cadco Systems',
+   0x001221: u'B.Braun Melsungen AG',
+   0x001222: u'Skardin (UK) Ltd',
+   0x001223: u'Pixim',
+   0x001224: u'NexQL Corporation',
+   0x001225: u'Motorola BCS',
+   0x001226: u'Japan Direx Corporation',
+   0x001227: u'Franklin Electric Co., Inc.',
+   0x001228: u'Data Ltd.',
+   0x001229: u'BroadEasy Technologies Co.,Ltd',
+   0x00122A: u'VTech Telecommunications Ltd.',
+   0x00122B: u'Virbiage Pty Ltd',
+   0x00122C: u'Soenen Controls N.V.',
+   0x00122D: u'SiNett Corporation',
+   0x00122E: u'Signal Technology - AISD',
+   0x00122F: u'Sanei Electric Inc.',
+   0x001230: u'Picaso Infocommunication CO., LTD.',
+   0x001231: u'Motion Control Systems, Inc.',
+   0x001232: u'LeWiz Communications Inc.',
+   0x001233: u'JRC TOKKI Co.,Ltd.',
+   0x001234: u'Camille Bauer',
+   0x001235: u'Andrew Corporation',
+   0x001236: u'ConSentry Networks',
+   0x001237: u'Texas Instruments',
+   0x001238: u'SetaBox Technology Co., Ltd.',
+   0x001239: u'S Net Systems Inc.',
+   0x00123A: u'Posystech Inc., Co.',
+   0x00123B: u'KeRo Systems ApS',
+   0x00123C: u'IP3 Networks, Inc.',
+   0x00123D: u'GES',
+   0x00123E: u'ERUNE technology Co., Ltd.',
+   0x00123F: u'Dell Inc',
+   0x001240: u'AMOI ELECTRONICS CO.,LTD',
+   0x001241: u'a2i marketing center',
+   0x001242: u'Millennial Net',
+   0x001243: u'Cisco',
+   0x001244: u'Cisco',
+   0x001245: u'Zellweger Analytics, Inc.',
+   0x001246: u'T.O.M TECHNOLOGY INC..',
+   0x001247: u'Samsung Electronics Co., Ltd.',
+   0x001248: u'Kashya Inc.',
+   0x001249: u'Delta Elettronica S.p.A.',
+   0x00124A: u'Dedicated Devices, Inc.',
+   0x00124B: u'Chipcon AS',
+   0x00124C: u'BBWM Corporation',
+   0x00124D: u'Inducon BV',
+   0x00124E: u'XAC AUTOMATION CORP.',
+   0x00124F: u'Tyco Thermal Controls LLC.',
+   0x001250: u'Tokyo Aircaft Instrument Co., Ltd.',
+   0x001251: u'SILINK',
+   0x001252: u'Citronix, LLC',
+   0x001253: u'AudioDev AB',
+   0x001254: u'Spectra Technologies Holdings Company Ltd',
+   0x001255: u'NetEffect Incorporated',
+   0x001256: u'LG INFORMATION & COMM.',
+   0x001257: u'LeapComm Communication Technologies Inc.',
+   0x001258: u'Activis Polska',
+   0x001259: u'THERMO ELECTRON KARLSRUHE',
+   0x00125A: u'Microsoft Corporation',
+   0x00125B: u'KAIMEI ELECTRONI',
+   0x00125C: u'Green Hills Software, Inc.',
+   0x00125D: u'CyberNet Inc.',
+   0x00125E: u'CAEN',
+   0x00125F: u'AWIND Inc.',
+   0x001260: u'Stanton Magnetics,inc.',
+   0x001261: u'Adaptix, Inc',
+   0x001262: u'Nokia Danmark A/S',
+   0x001263: u'Data Voice Technologies GmbH',
+   0x001264: u'daum electronic gmbh',
+   0x001265: u'Enerdyne Technologies, Inc.',
+   0x001266: u'PRIVATE',
+   0x001267: u'Matsushita Electronic Components Co., Ltd.',
+   0x001268: u'IPS d.o.o.',
+   0x001269: u'Value Electronics',
+   0x00126A: u'OPTOELECTRONICS Co., Ltd.',
+   0x00126B: u'Ascalade Communications Limited',
+   0x00126C: u'Visonic Ltd.',
+   0x00126D: u'University of California, Berkeley',
+   0x00126E: u'Seidel Elektronik GmbH Nfg.KG',
+   0x00126F: u'Rayson Technology Co., Ltd.',
+   0x001270: u'NGES Denro Systems',
+   0x001271: u'Measurement Computing Corp',
+   0x001272: u'Redux Communications Ltd.',
+   0x001273: u'Stoke Inc',
+   0x001274: u'NIT lab',
+   0x001275: u'Moteiv Corporation',
+   0x001276: u'Microsol Holdings Ltd.',
+   0x001277: u'Korenix Technologies Co., Ltd.',
+   0x001278: u'International Bar Code',
+   0x001279: u'Hewlett Packard',
+   0x00127A: u'Sanyu Industry Co.,Ltd.',
+   0x00127B: u'VIA Networking Technologies, Inc.',
+   0x00127C: u'SWEGON AB',
+   0x00127D: u'MobileAria',
+   0x00127E: u'Digital Lifestyles Group, Inc.',
+   0x00127F: u'Cisco',
+   0x001280: u'Cisco',
+   0x001281: u'CIEFFE srl',
+   0x001282: u'Qovia',
+   0x001283: u'Nortel Networks',
+   0x001284: u'Lab33 Srl',
+   0x001285: u'Gizmondo Europe Ltd',
+   0x001286: u'ENDEVCO CORP',
+   0x001287: u'Digital Everywhere Unterhaltungselektronik GmbH',
+   0x001288: u'2Wire, Inc',
+   0x001289: u'Advance Sterilization Products',
+   0x00128A: u'Motorola PCS',
+   0x00128B: u'Sensory Networks Inc',
+   0x00128C: u'Woodward Governor',
+   0x00128D: u'STB Datenservice GmbH',
+   0x00128E: u'Q-Free ASA',
+   0x00128F: u'Montilio',
+   0x001290: u'KYOWA Electric & Machinery Corp.',
+   0x001291: u'KWS Computersysteme GmbH',
+   0x001292: u'Griffin Technology',
+   0x001293: u'GE Energy',
+   0x001294: u'Eudyna Devices Inc.',
+   0x001295: u'Aiware Inc.',
+   0x001296: u'Addlogix',
+   0x001297: u'O2Micro, Inc.',
+   0x001298: u'MICO ELECTRIC(SHENZHEN) LIMITED',
+   0x001299: u'Ktech Telecommunications Inc',
+   0x00129A: u'IRT Electronics Pty Ltd',
+   0x00129B: u'E2S Electronic Engineering Solutions, S.L.',
+   0x00129C: u'Yulinet',
+   0x00129D: u'FIRST INTERNATIONAL COMPUTER DO BRASIL LTDA',
+   0x00129E: u'Surf Communications Inc.',
+   0x00129F: u'RAE Systems, Inc.',
+   0x0012A0: u'NeoMeridian Sdn Bhd',
+   0x0012A1: u'BluePacket Communications Co., Ltd.',
+   0x0012A2: u'VITA',
+   0x0012A3: u'Trust International B.V.',
+   0x0012A4: u'ThingMagic, LLC',
+   0x0012A5: u'Stargen, Inc.',
+   0x0012A6: u'Lake Technology Ltd',
+   0x0012A7: u'ISR TECHNOLOGIES Inc',
+   0x0012A8: u'intec GmbH',
+   0x0012A9: u'3COM EUROPE LTD',
+   0x0012AA: u'IEE, Inc.',
+   0x0012AB: u'WiLife, Inc.',
+   0x0012AC: u'ONTIMETEK INC.',
+   0x0012AD: u'IDS GmbH',
+   0x0012AE: u'HLS HARD-LINE Solutions Inc.',
+   0x0012AF: u'ELPRO Technologies',
+   0x0012B0: u'Efore Oyj   (Plc)',
+   0x0012B1: u'Dai Nippon Printing Co., Ltd',
+   0x0012B2: u'AVOLITES LTD.',
+   0x0012B3: u'Advance Wireless Technology Corp.',
+   0x0012B4: u'Work GmbH',
+   0x0012B5: u'Vialta, Inc.',
+   0x0012B6: u'Santa Barbara Infrared, Inc.',
+   0x0012B7: u'PTW Freiburg',
+   0x0012B8: u'G2 Microsystems',
+   0x0012B9: u'Fusion Digital Technology',
+   0x0012BA: u'FSI Systems, Inc.',
+   0x0012BB: u'Telecommunications Industry Association TR-41 Committee',
+   0x0012BC: u'Echolab LLC',
+   0x0012BD: u'Avantec Manufacturing Limited',
+   0x0012BE: u'Astek Corporation',
+   0x0012BF: u'Arcadyan Technology Corporation',
+   0x0012C0: u'HotLava Systems, Inc.',
+   0x0012C1: u'Check Point Software Technologies',
+   0x0012C2: u'Apex Electronics Factory',
+   0x0012C3: u'WIT S.A.',
+   0x0012C4: u'Viseon, Inc.',
+   0x0012C5: u'V-Show Technology Co.Ltd',
+   0x0012C6: u'TGC America, Inc',
+   0x0012C7: u'SECURAY Technologies Ltd.Co.',
+   0x0012C8: u'Perfect tech',
+   0x0012C9: u'Motorola BCS',
+   0x0012CA: u'Hansen Telecom',
+   0x0012CB: u'CSS Inc.',
+   0x0012CC: u'Bitatek CO., LTD',
+   0x0012CD: u'ASEM SpA',
+   0x0012CE: u'Advanced Cybernetics Group',
+   0x0012CF: u'Accton Technology Corporation',
+   0x0012D0: u'Gossen-Metrawatt-GmbH',
+   0x0012D1: u'Texas Instruments Inc',
+   0x0012D2: u'Texas Instruments',
+   0x0012D3: u'Zetta Systems, Inc.',
+   0x0012D4: u'Princeton Technology, Ltd',
+   0x0012D5: u'Motion Reality Inc.',
+   0x0012D6: u'Jiangsu Yitong High-Tech Co.,Ltd',
+   0x0012D7: u'Invento Networks, Inc.',
+   0x0012D8: u'International Games System Co., Ltd.',
+   0x0012D9: u'Cisco Systems',
+   0x0012DA: u'Cisco Systems',
+   0x0012DB: u'ZIEHL industrie-elektronik GmbH + Co KG',
+   0x0012DC: u'SunCorp Industrial Limited',
+   0x0012DD: u'Shengqu Information Technology (Shanghai) Co., Ltd.',
+   0x0012DE: u'Radio Components Sweden AB',
+   0x0012DF: u'Novomatic AG',
+   0x0012E0: u'Codan Limited',
+   0x0012E1: u'Alliant Networks, Inc',
+   0x0012E2: u'ALAXALA Networks Corporation',
+   0x0012E3: u'Agat-RT, Ltd.',
+   0x0012E4: u'ZIEHL industrie-electronik GmbH + Co KG',
+   0x0012E5: u'Time America, Inc.',
+   0x0012E6: u'SPECTEC COMPUTER CO., LTD.',
+   0x0012E7: u'Projectek Networking Electronics Corp.',
+   0x0012E8: u'Fraunhofer IMS',
+   0x0012E9: u'Abbey Systems Ltd',
+   0x0012EA: u'Trane',
+   0x0012EB: u'R2DI, LLC',
+   0x0012EC: u'Movacolor b.v.',
+   0x0012ED: u'AVG Advanced Technologies',
+   0x0012EE: u'Sony Ericsson Mobile Communications AB',
+   0x0012EF: u'OneAccess SA',
+   0x0012F0: u'Intel Corporate',
+   0x0012F1: u'IFOTEC',
+   0x0012F2: u'Foundry Networks',
+   0x0012F3: u'connectBlue AB',
+   0x0012F4: u'Belco International Co.,Ltd.',
+   0x0012F5: u'Prolificx Ltd',
+   0x0012F6: u'MDK CO.,LTD.',
+   0x0012F7: u'Xiamen Xinglian Electronics Co., Ltd.',
+   0x0012F8: u'WNI Resources, LLC',
+   0x0012F9: u'URYU SEISAKU, LTD.',
+   0x0012FA: u'THX LTD',
+   0x0012FB: u'Samsung Electronics',
+   0x0012FC: u'PLANET System Co.,LTD',
+   0x0012FD: u'OPTIMUS IC S.A.',
+   0x0012FE: u'Lenovo Mobile Communication Technology Ltd.',
+   0x0012FF: u'Lely Industries N.V.',
+   0x001300: u'IT-FACTORY, INC.',
+   0x001301: u'IronGate S.L.',
+   0x001302: u'Intel Corporate',
+   0x001303: u'GateConnect Technologies GmbH',
+   0x001304: u'Flaircomm Technologies Co. LTD',
+   0x001305: u'Epicom, Inc.',
+   0x001306: u'Always On Wireless',
+   0x001307: u'Paravirtual Corporation',
+   0x001308: u'Nuvera Fuel Cells',
+   0x001309: u'Ocean Broadband Networks',
+   0x00130A: u'Nortel',
+   0x00130B: u'Mextal B.V.',
+   0x00130C: u'HF System Corporation',
+   0x00130D: u'GALILEO AVIONICA',
+   0x00130E: u'Focusrite Audio Engineering Limited',
+   0x00130F: u'EGEMEN Bilgisayar Muh San ve Tic LTD STI',
+   0x001310: u'Cisco-Linksys, LLC',
+   0x001311: u'ARRIS International',
+   0x001312: u'Amedia Networks Inc.',
+   0x001313: u'GuangZhou Post & Telecom Equipment ltd',
+   0x001314: u'Asiamajor Inc.',
+   0x001315: u'SONY Computer Entertainment inc,',
+   0x001316: u'L-S-B GmbH',
+   0x001317: u'GN Netcom as',
+   0x001318: u'DGSTATION Co., Ltd.',
+   0x001319: u'Cisco Systems',
+   0x00131A: u'Cisco Systems',
+   0x00131B: u'BeCell Innovations Corp.',
+   0x00131C: u'LiteTouch, Inc.',
+   0x00131D: u'Scanvaegt International A/S',
+   0x00131E: u'Peiker acustic GmbH & Co. KG',
+   0x00131F: u'NxtPhase T&D, Corp.',
+   0x001320: u'Intel Corporate',
+   0x001321: u'Hewlett Packard',
+   0x001322: u'DAQ Electronics, Inc.',
+   0x001323: u'Cap Co., Ltd.',
+   0x001324: u'Schneider Electric Ultra Terminal',
+   0x001325: u'ImmenStar Inc.',
+   0x001326: u'ECM Systems Ltd',
+   0x001327: u'Data Acquisitions limited',
+   0x001328: u'Westech Korea Inc.,',
+   0x001329: u'VSST Co., LTD',
+   0x00132A: u'STROM telecom, s. r. o.',
+   0x00132B: u'Phoenix Digital',
+   0x00132C: u'MAZ Brandenburg GmbH',
+   0x00132D: u'iWise Communications Pty Ltd',
+   0x00132E: u'ITian Coporation',
+   0x00132F: u'Interactek',
+   0x001330: u'EURO PROTECTION SURVEILLANCE',
+   0x001331: u'CellPoint Connect',
+   0x001332: u'Beijing Topsec Network Security Technology Co., Ltd.',
+   0x001333: u'Baud Technology Inc.',
+   0x001334: u'Arkados, Inc.',
+   0x001335: u'VS Industry Berhad',
+   0x001336: u'Tianjin 712 Communication Broadcasting co., ltd.',
+   0x001337: u'Orient Power Home Network Ltd.',
+   0x001338: u'FRESENIUS-VIAL',
+   0x001339: u'EL-ME AG',
+   0x00133A: u'VadaTech Inc.',
+   0x00133B: u'Speed Dragon Multimedia Limited',
+   0x00133C: u'QUINTRON SYSTEMS INC.',
+   0x00133D: u'Micro Memory LLC',
+   0x00133E: u'MetaSwitch',
+   0x00133F: u'Eppendorf Instrumente GmbH',
+   0x001340: u'AD.EL s.r.l.',
+   0x001341: u'Shandong New Beiyang Information Technology Co.,Ltd',
+   0x001342: u'Vision Research, Inc.',
+   0x001343: u'Matsushita Electronic Components (Europe) GmbH',
+   0x001344: u'Fargo Electronics Inc.',
+   0x001345: u'Eaton Corporation',
+   0x001346: u'D-Link Corporation',
+   0x001347: u'BlueTree Wireless Data Inc.',
+   0x001348: u'Artila Electronics Co., Ltd.',
+   0x001349: u'ZyXEL Communications Corporation',
+   0x00134A: u'Engim, Inc.',
+   0x00134B: u'ToGoldenNet Technology Inc.',
+   0x00134C: u'YDT Technology International',
+   0x00134D: u'IPC systems',
+   0x00134E: u'Valox Systems, Inc.',
+   0x00134F: u'Tranzeo Wireless Technologies Inc.',
+   0x001350: u'Silver Spring Networks, Inc',
+   0x001351: u'Niles Audio Corporation',
+   0x001352: u'Naztec, Inc.',
+   0x001353: u'HYDAC Filtertechnik GMBH',
+   0x001354: u'Zcomax Technologies, Inc.',
+   0x001355: u'TOMEN Cyber-business Solutions, Inc.',
+   0x001356: u'target systemelectronic gmbh',
+   0x001357: u'Soyal Technology Co., Ltd.',
+   0x001358: u'Realm Systems, Inc.',
+   0x001359: u'ProTelevision Technologies A/S',
+   0x00135A: u'Project T&E Limited',
+   0x00135B: u'PanelLink Cinema, LLC',
+   0x00135C: u'OnSite Systems, Inc.',
+   0x00135D: u'NTTPC Communications, Inc.',
+   0x00135E: u'EAB/RWI/K',
+   0x00135F: u'Cisco Systems',
+   0x001360: u'Cisco Systems',
+   0x001361: u'Biospace Co., Ltd.',
+   0x001362: u'ShinHeung Precision Co., Ltd.',
+   0x001363: u'Verascape, Inc.',
+   0x001364: u'Paradigm Technology Inc..',
+   0x001365: u'Nortel',
+   0x001366: u'Neturity Technologies Inc.',
+   0x001367: u'Narayon. Co., Ltd.',
+   0x001368: u'Maersk Data Defence',
+   0x001369: u'Honda Electron Co., LED.',
+   0x00136A: u'Hach Ultra Analytics',
+   0x00136B: u'E-TEC',
+   0x00136C: u'PRIVATE',
+   0x00136D: u'Tentaculus AB',
+   0x00136E: u'Techmetro Corp.',
+   0x00136F: u'PacketMotion, Inc.',
+   0x001370: u'Nokia Danmark A/S',
+   0x001371: u'Motorola CHS',
+   0x001372: u'Dell Inc.',
+   0x001373: u'BLwave Electronics Co., Ltd',
+   0x001374: u'Attansic Technology Corp.',
+   0x001375: u'American Security Products Co.',
+   0x001376: u'Tabor Electronics Ltd.',
+   0x001377: u'Samsung Electronics CO., LTD',
+   0x001378: u'QSAN Technology, Inc.',
+   0x001379: u'PONDER INFORMATION INDUSTRIES LTD.',
+   0x00137A: u'Netvox Technology Co., Ltd.',
+   0x00137B: u'Movon Corporation',
+   0x00137C: u'Kaicom co., Ltd.',
+   0x00137D: u'Dynalab, Inc.',
+   0x00137E: u'CorEdge Networks, Inc.',
+   0x00137F: u'Cisco Systems',
+   0x001380: u'Cisco Systems',
+   0x001381: u'CHIPS & Systems, Inc.',
+   0x001382: u'Cetacea Networks Corporation',
+   0x001383: u'Application Technologies and Engineering Research Laboratory',
+   0x001384: u'Advanced Motion Controls',
+   0x001385: u'Add-On Technology Co., LTD.',
+   0x001386: u'ABB Inc./Totalflow',
+   0x001387: u'27M Technologies AB',
+   0x001388: u'WiMedia Alliance',
+   0x001389: u'Redes de Telefonía Móvil S.A.',
+   0x00138A: u'QINGDAO GOERTEK ELECTRONICS CO.,LTD.',
+   0x00138B: u'Phantom Technologies LLC',
+   0x00138C: u'Kumyoung.Co.Ltd',
+   0x00138D: u'Kinghold',
+   0x00138E: u'FOAB Elektronik AB',
+   0x00138F: u'Asiarock Incorporation',
+   0x001390: u'Termtek Computer Co., Ltd',
+   0x001391: u'OUEN CO.,LTD.',
+   0x001392: u'Ruckus Wireless',
+   0x001393: u'Panta Systems, Inc.',
+   0x001394: u'Infohand Co.,Ltd',
+   0x001395: u'congatec AG',
+   0x001396: u'Acbel Polytech Inc.',
+   0x001397: u'Xsigo Systems, Inc.',
+   0x001398: u'TrafficSim Co.,Ltd',
+   0x001399: u'STAC Corporation.',
+   0x00139A: u'K-ubique ID Corp.',
+   0x00139B: u'ioIMAGE Ltd.',
+   0x00139C: u'Exavera Technologies, Inc.',
+   0x00139D: u'Design of Systems on Silicon S.A.',
+   0x00139E: u'Ciara Technologies Inc.',
+   0x00139F: u'Electronics Design Services, Co., Ltd.',
+   0x0013A0: u'ALGOSYSTEM Co., Ltd.',
+   0x0013A1: u'Crow Electronic Engeneering',
+   0x0013A2: u'MaxStream, Inc',
+   0x0013A3: u'Siemens Com CPE Devices',
+   0x0013A4: u'KeyEye Communications',
+   0x0013A5: u'General Solutions, LTD.',
+   0x0013A6: u'Extricom Ltd',
+   0x0013A7: u'BATTELLE MEMORIAL INSTITUTE',
+   0x0013A8: u'Tanisys Technology',
+   0x0013A9: u'Sony Corporation',
+   0x0013AA: u'ALS  & TEC Ltd.',
+   0x0013AB: u'Telemotive AG',
+   0x0013AC: u'Sunmyung Electronics Co., LTD',
+   0x0013AD: u'Sendo Ltd',
+   0x0013AE: u'Radiance Technologies',
+   0x0013AF: u'NUMA Technology,Inc.',
+   0x0013B0: u'Jablotron',
+   0x0013B1: u'Intelligent Control Systems (Asia) Pte Ltd',
+   0x0013B2: u'Carallon Limited',
+   0x0013B3: u'Beijing Ecom Communications Technology Co., Ltd.',
+   0x0013B4: u'Appear TV',
+   0x0013B5: u'Wavesat',
+   0x0013B6: u'Sling Media, Inc.',
+   0x0013B7: u'Scantech ID',
+   0x0013B8: u'RyCo Electronic Systems Limited',
+   0x0013B9: u'BM SPA',
+   0x0013BA: u'ReadyLinks Inc',
+   0x0013BB: u'PRIVATE',
+   0x0013BC: u'Artimi Ltd',
+   0x0013BD: u'HYMATOM SA',
+   0x0013BE: u'Virtual Conexions',
+   0x0013BF: u'Media System Planning Corp.',
+   0x0013C0: u'Trix Tecnologia Ltda.',
+   0x0013C1: u'Asoka USA Corporation',
+   0x0013C2: u'WACOM Co.,Ltd',
+   0x0013C3: u'Cisco Systems',
+   0x0013C4: u'Cisco Systems',
+   0x0013C5: u'LIGHTRON FIBER-OPTIC DEVICES INC.',
+   0x0013C6: u'OpenGear, Inc',
+   0x0013C7: u'IONOS Co.,Ltd.',
+   0x0013C8: u'PIRELLI BROADBAND SOLUTIONS S.P.A.',
+   0x0013C9: u'Beyond Achieve Enterprises Ltd.',
+   0x0013CA: u'X-Digital Systems, Inc.',
+   0x0013CB: u'Zenitel Norway AS',
+   0x0013CC: u'Tall Maple Systems',
+   0x0013CD: u'MTI co. LTD',
+   0x0013CE: u'Intel Corporate',
+   0x0013CF: u'4Access Communications',
+   0x0013D0: u'e-San Limited',
+   0x0013D1: u'KIRK telecom A/S',
+   0x0013D2: u'PAGE IBERICA, S.A.',
+   0x0013D3: u'MICRO-STAR INTERNATIONAL CO., LTD.',
+   0x0013D4: u'ASUSTek COMPUTER INC.',
+   0x0013D5: u'WiNetworks LTD',
+   0x0013D6: u'TII NETWORK TECHNOLOGIES, INC.',
+   0x0013D7: u'SPIDCOM Technologies SA',
+   0x0013D8: u'Princeton Instruments',
+   0x0013D9: u'Matrix Product Development, Inc.',
+   0x0013DA: u'Diskware Co., Ltd',
+   0x0013DB: u'SHOEI Electric Co.,Ltd',
+   0x0013DC: u'IBTEK INC.',
+   0x0013DD: u'Abbott Diagnostics',
+   0x0013DE: u'Adapt4',
+   0x0013DF: u'Ryvor Corp.',
+   0x0013E0: u'Murata Manufacturing Co., Ltd.',
+   0x0013E1: u'Iprobe',
+   0x0013E2: u'GeoVision Inc.',
+   0x0013E3: u'CoVi Technologies, Inc.',
+   0x0013E4: u'YANGJAE SYSTEMS CORP.',
+   0x0013E5: u'TENOSYS, INC.',
+   0x0013E6: u'Technolution',
+   0x0013E7: u'Minelab Electronics Pty Limited',
+   0x0013E8: u'Intel Corporate',
+   0x0013E9: u'VeriWave, Inc.',
+   0x0013EA: u'Kamstrup A/S',
+   0x0013EB: u'Sysmaster Corporation',
+   0x0013EC: u'Sunbay Software AG',
+   0x0013ED: u'PSIA',
+   0x0013EE: u'JBX Designs Inc.',
+   0x0013EF: u'Kingjon Digital Technology Co.,Ltd',
+   0x0013F0: u'Wavefront Semiconductor',
+   0x0013F1: u'AMOD Technology Co., Ltd.',
+   0x0013F2: u'Klas Ltd',
+   0x0013F3: u'Giga-byte Communications Inc.',
+   0x0013F4: u'Psitek (Pty) Ltd',
+   0x0013F5: u'Akimbi Systems',
+   0x0013F6: u'Cintech',
+   0x0013F7: u'SMC Networks, Inc.',
+   0x0013F8: u'Dex Security Solutions',
+   0x0013F9: u'Cavera Systems',
+   0x0013FA: u'LifeSize Communications, Inc',
+   0x0013FB: u'RKC INSTRUMENT INC.',
+   0x0013FC: u'SiCortex, Inc',
+   0x0013FD: u'Nokia Danmark A/S',
+   0x0013FE: u'GRANDTEC ELECTRONIC CORP.',
+   0x0013FF: u'Dage-MTI of MC, Inc.',
+   0x001400: u'MINERVA KOREA CO., LTD',
+   0x001401: u'Rivertree Networks Corp.',
+   0x001402: u'kk-electronic a/s',
+   0x001403: u'Renasis, LLC',
+   0x001404: u'Motorola CHS',
+   0x001405: u'OpenIB, Inc.',
+   0x001406: u'Go Networks',
+   0x001407: u'Biosystems',
+   0x001408: u'Eka Systems Inc.',
+   0x001409: u'MAGNETI MARELLI   S.E. S.p.A.',
+   0x00140A: u'WEPIO Co., Ltd.',
+   0x00140B: u'FIRST INTERNATIONAL COMPUTER, INC.',
+   0x00140C: u'GKB CCTV CO., LTD.',
+   0x00140D: u'Nortel',
+   0x00140E: u'Nortel',
+   0x00140F: u'Federal State Unitary Enterprise Leningrad R&D Institute of',
+   0x001410: u'Suzhou Keda Technology CO.,Ltd',
+   0x001411: u'Deutschmann Automation GmbH & Co. KG',
+   0x001412: u'S-TEC electronics AG',
+   0x001413: u'Trebing & Himstedt Prozessautomation GmbH & Co. KG',
+   0x001414: u'Jumpnode Systems LLC.',
+   0x001415: u'Intec Automation Inc.',
+   0x001416: u'Scosche Industries, Inc.',
+   0x001417: u'RSE Informations Technologie GmbH',
+   0x001418: u'C4Line',
+   0x001419: u'SIDSA',
+   0x00141A: u'DEICY CORPORATION',
+   0x00141B: u'Cisco Systems',
+   0x00141C: u'Cisco Systems',
+   0x00141D: u'Lust Antriebstechnik GmbH',
+   0x00141E: u'P.A. Semi, Inc.',
+   0x00141F: u'SunKwang Electronics Co., Ltd',
+   0x001420: u'G-Links networking company',
+   0x001421: u'Total Wireless Technologies Pte. Ltd.',
+   0x001422: u'Dell Inc.',
+   0x001423: u'J-S Co. NEUROCOM',
+   0x001424: u'Merry Electrics CO., LTD.',
+   0x001425: u'Galactic Computing Corp.',
+   0x001426: u'NL Technology',
+   0x001427: u'JazzMutant',
+   0x001428: u'Vocollect, Inc',
+   0x001429: u'V Center Technologies Co., Ltd.',
+   0x00142A: u'Elitegroup Computer System Co., Ltd',
+   0x00142B: u'Edata Technologies Inc.',
+   0x00142C: u'Koncept International, Inc.',
+   0x00142D: u'Toradex AG',
+   0x00142E: u'77 Elektronika Kft.',
+   0x00142F: u'WildPackets',
+   0x001430: u'ViPowER, Inc',
+   0x001431: u'PDL Electronics Ltd',
+   0x001432: u'Tarallax Wireless, Inc.',
+   0x001433: u'Empower Technologies(Canada) Inc.',
+   0x001434: u'Keri Systems, Inc',
+   0x001435: u'CityCom Corp.',
+   0x001436: u'Qwerty Elektronik AB',
+   0x001437: u'GSTeletech Co.,Ltd.',
+   0x001438: u'Hewlett Packard',
+   0x001439: u'Blonder Tongue Laboratories, Inc.',
+   0x00143A: u'RAYTALK INTERNATIONAL SRL',
+   0x00143B: u'Sensovation AG',
+   0x00143C: u'Oerlikon Contraves Inc.',
+   0x00143D: u'Aevoe Inc.',
+   0x00143E: u'AirLink Communications, Inc.',
+   0x00143F: u'Hotway Technology Corporation',
+   0x001440: u'ATOMIC Corporation',
+   0x001441: u'Innovation Sound Technology Co., LTD.',
+   0x001442: u'ATTO CORPORATION',
+   0x001443: u'Consultronics Europe Ltd',
+   0x001444: u'Grundfos Electronics',
+   0x001445: u'Telefon-Gradnja d.o.o.',
+   0x001446: u'KidMapper, Inc.',
+   0x001447: u'BOAZ Inc.',
+   0x001448: u'Inventec Multimedia & Telecom Corporation',
+   0x001449: u'Sichuan Changhong Electric Ltd.',
+   0x00144A: u'Taiwan Thick-Film Ind. Corp.',
+   0x00144B: u'Hifn, Inc.',
+   0x00144C: u'General Meters Corp.',
+   0x00144D: u'Intelligent Systems',
+   0x00144E: u'SRISA',
+   0x00144F: u'Sun Microsystems, Inc.',
+   0x001450: u'Heim Systems GmbH',
+   0x001451: u'Apple Computer Inc.',
+   0x001452: u'CALCULEX,INC.',
+   0x001453: u'ADVANTECH TECHNOLOGIES CO.,LTD',
+   0x001454: u'Symwave',
+   0x001455: u'Coder Electronics Corporation',
+   0x001456: u'Edge Products',
+   0x001457: u'T-VIPS AS',
+   0x001458: u'HS Automatic ApS',
+   0x001459: u'Moram Co., Ltd.',
+   0x00145A: u'Elektrobit AG',
+   0x00145B: u'SeekerNet Inc.',
+   0x00145C: u'Intronics B.V.',
+   0x00145D: u'WJ Communications, Inc.',
+   0x00145E: u'IBM',
+   0x00145F: u'ADITEC CO. LTD',
+   0x001460: u'Kyocera Wireless Corp.',
+   0x001461: u'CORONA CORPORATION',
+   0x001462: u'Digiwell Technology, inc',
+   0x001463: u'IDCS N.V.',
+   0x001464: u'Cryptosoft',
+   0x001465: u'Novo Nordisk A/S',
+   0x001466: u'Kleinhenz Elektronik GmbH',
+   0x001467: u'ArrowSpan Inc.',
+   0x001468: u'CelPlan International, Inc.',
+   0x001469: u'Cisco Systems',
+   0x00146A: u'Cisco Systems',
+   0x00146B: u'Anagran, Inc.',
+   0x00146C: u'Netgear Inc.',
+   0x00146D: u'RF Technologies',
+   0x00146E: u'H. Stoll GmbH & Co. KG',
+   0x00146F: u'Kohler Co',
+   0x001470: u'Prokom Software SA',
+   0x001471: u'Eastern Asia Technology Limited',
+   0x001472: u'China Broadband Wireless IP Standard Group',
+   0x001473: u'Bookham Inc',
+   0x001474: u'K40 Electronics',
+   0x001475: u'Wiline Networks, Inc.',
+   0x001476: u'MultiCom Industries Limited',
+   0x001477: u'Nertec  Inc.',
+   0x001478: u'ShenZhen TP-LINK Technologies Co., Ltd.',
+   0x001479: u'NEC Magnus Communications,Ltd.',
+   0x00147A: u'Eubus GmbH',
+   0x00147B: u'Iteris, Inc.',
+   0x00147C: u'3Com Europe Ltd',
+   0x00147D: u'Aeon Digital International',
+   0x00147E: u'PanGo Networks, Inc.',
+   0x00147F: u'Thomson Telecom Belgium',
+   0x001480: u'Hitachi-LG Data Storage Korea, Inc',
+   0x001481: u'Multilink Inc',
+   0x001482: u'GoBackTV, Inc',
+   0x001483: u'eXS Inc.',
+   0x001484: u'CERMATE TECHNOLOGIES INC',
+   0x001485: u'Giga-Byte',
+   0x001486: u'Echo Digital Audio Corporation',
+   0x001487: u'American Technology Integrators',
+   0x001488: u'Akorri Networks',
+   0x001489: u'B15402100 - JANDEI, S.L.',
+   0x00148A: u'Elin Ebg Traction Gmbh',
+   0x00148B: u'Globo Electronic GmbH & Co. KG',
+   0x00148C: u'Fortress Technologies',
+   0x00148D: u'Cubic Defense Simulation Systems',
+   0x00148E: u'Tele Power Inc.',
+   0x00148F: u'Protronic (Far East) Ltd.',
+   0x001490: u'ASP Corporation',
+   0x001491: u'Daniels Electronics Ltd.',
+   0x001492: u'Liteon, Mobile Media Solution SBU',
+   0x001493: u'Systimax Solutions',
+   0x001494: u'ESU AG',
+   0x001495: u'2Wire, Inc.',
+   0x001496: u'Phonic Corp.',
+   0x001497: u'ZHIYUAN Eletronics co.,ltd.',
+   0x001498: u'Viking Design Technology',
+   0x001499: u'Helicomm Inc',
+   0x00149A: u'Motorola Mobile Devices Business',
+   0x00149B: u'Nokota Communications, LLC',
+   0x00149C: u'HF Company',
+   0x00149D: u'Sound ID Inc.',
+   0x00149E: u'UbONE Co., Ltd',
+   0x00149F: u'System and Chips, Inc.',
+   0x0014A0: u'RFID Asset Track, Inc.',
+   0x0014A1: u'Synchronous Communication Corp',
+   0x0014A2: u'Core Micro Systems Inc.',
+   0x0014A3: u'Vitelec BV',
+   0x0014A4: u'Hon Hai Precision Ind. Co., Ltd.',
+   0x0014A5: u'Gemtek Technology Co., Ltd.',
+   0x0014A6: u'Teranetics, Inc.',
+   0x0014A7: u'Nokia Danmark A/S',
+   0x0014A8: u'Cisco Systems',
+   0x0014A9: u'Cisco Systems',
+   0x0014AA: u'Ashly Audio, Inc.',
+   0x0014AB: u'Senhai Electronic Technology Co., Ltd.',
+   0x0014AC: u'Bountiful WiFi',
+   0x0014AD: u'Gassner Wiege- u. Meßtechnik GmbH',
+   0x0014AE: u'Wizlogics Co., Ltd.',
+   0x0014AF: u'Datasym Inc.',
+   0x0014B0: u'Naeil Community',
+   0x0014B1: u'Avitec AB',
+   0x0014B2: u'mCubelogics Corporation',
+   0x0014B3: u'CoreStar International Corp',
+   0x0014B4: u'General Dynamics United Kingdom Ltd',
+   0x0014B5: u'PRIVATE',
+   0x0014B6: u'Enswer Technology Inc.',
+   0x0014B7: u'AR Infotek Inc.',
+   0x0014B8: u'Hill-Rom',
+   0x0014B9: u'STEPMIND',
+   0x0014BA: u'Carvers SA de CV',
+   0x0014BB: u'Open Interface North America',
+   0x0014BC: u'SYNECTIC TELECOM EXPORTS PVT. LTD.',
+   0x0014BD: u'incNETWORKS, Inc',
+   0x0014BE: u'Wink communication technology CO.LTD',
+   0x0014BF: u'Cisco-Linksys LLC',
+   0x0014C0: u'Symstream Technology Group Ltd',
+   0x0014C1: u'U.S. Robotics Corporation',
+   0x0014C2: u'Hewlett Packard',
+   0x0014C3: u'Seagate Technology LLC',
+   0x0014C4: u'Vitelcom Mobile Technology',
+   0x0014C5: u'Alive Technologies Pty Ltd',
+   0x0014C6: u'Quixant Ltd',
+   0x0014C7: u'Nortel',
+   0x0014C8: u'Contemporary Research Corp',
+   0x0014C9: u'Silverback Systems, Inc.',
+   0x0014CA: u'Key Radio Systems Limited',
+   0x0014CB: u'LifeSync Corporation',
+   0x0014CC: u'Zetec, Inc.',
+   0x0014CD: u'DigitalZone Co., Ltd.',
+   0x0014CE: u'NF CORPORATION',
+   0x0014CF: u'Nextlink.to A/S',
+   0x0014D0: u'BTI Photonics',
+   0x0014D1: u'TRENDware International, Inc.',
+   0x0014D2: u'KYUKI CORPORATION',
+   0x0014D3: u'SEPSA',
+   0x0014D4: u'K Technology Corporation',
+   0x0014D5: u'Datang Telecom Technology CO. , LCD,Optical Communication Br',
+   0x0014D6: u'Jeongmin Electronics Co.,Ltd.',
+   0x0014D7: u'DataStor Technology Inc.',
+   0x0014D8: u'bio-logic SA',
+   0x0014D9: u'IP Fabrics, Inc.',
+   0x0014DA: u'Huntleigh Healthcare',
+   0x0014DB: u'Elma Trenew Electronic GmbH',
+   0x0014DC: u'Communication System Design & Manufacturing (CSDM)',
+   0x0014DD: u'Covergence Inc.',
+   0x0014DE: u'Sage Instruments Inc.',
+   0x0014DF: u'HI-P Tech Corporation',
+   0x0014E0: u'LET\'S Corporation',
+   0x0014E1: u'Data Display AG',
+   0x0014E2: u'datacom systems inc.',
+   0x0014E3: u'mm-lab GmbH',
+   0x0014E4: u'Integral Technologies',
+   0x0014E5: u'Alticast',
+   0x0014E6: u'AIM Infrarotmodule GmbH',
+   0x0014E7: u'Stolinx,. Inc',
+   0x0014E8: u'Motorola CHS',
+   0x0014E9: u'Nortech International',
+   0x0014EA: u'S Digm Inc. (Safe Paradigm Inc.)',
+   0x0014EB: u'AwarePoint Corporation',
+   0x0014EC: u'Acro Telecom',
+   0x0014ED: u'Airak, Inc.',
+   0x0014EE: u'Western Digital Technologies, Inc.',
+   0x0014EF: u'TZero Technologies, Inc.',
+   0x0014F0: u'Business Security OL AB',
+   0x0014F1: u'Cisco Systems',
+   0x0014F2: u'Cisco Systems',
+   0x0014F3: u'ViXS Systems Inc',
+   0x0014F4: u'DekTec Digital Video B.V.',
+   0x0014F5: u'OSI Security Devices',
+   0x0014F6: u'Juniper Networks, Inc.',
+   0x0014F7: u'Crevis',
+   0x0014F8: u'Scientific Atlanta',
+   0x0014F9: u'Vantage Controls',
+   0x0014FA: u'AsGa S.A.',
+   0x0014FB: u'Technical Solutions Inc.',
+   0x0014FC: u'Extandon, Inc.',
+   0x0014FD: u'Thecus Technology Corp.',
+   0x0014FE: u'Artech Electronics',
+   0x0014FF: u'Precise Automation, LLC',
+   0x001500: u'Intel Corporate',
+   0x001501: u'LexBox',
+   0x001502: u'BETA tech',
+   0x001503: u'PROFIcomms s.r.o.',
+   0x001504: u'GAME PLUS CO., LTD.',
+   0x001505: u'Actiontec Electronics, Inc',
+   0x001506: u'BeamExpress, Inc',
+   0x001507: u'Renaissance Learning Inc',
+   0x001508: u'Global Target Enterprise Inc',
+   0x001509: u'Plus Technology Co., Ltd',
+   0x00150A: u'Sonoa Systems, Inc',
+   0x00150B: u'SAGE INFOTECH LTD.',
+   0x00150C: u'AVM GmbH',
+   0x00150D: u'Hoana Medical, Inc.',
+   0x00150E: u'OPENBRAIN TECHNOLOGIES CO., LTD.',
+   0x00150F: u'mingjong',
+   0x001510: u'Techsphere Co., Ltd',
+   0x001511: u'Data Center Systems',
+   0x001512: u'Zurich University of Applied Sciences',
+   0x001513: u'EFS sas',
+   0x001514: u'Hu Zhou NAVA Networks&Electronics Ltd.',
+   0x001515: u'Leipold+Co.GmbH',
+   0x001516: u'URIEL SYSTEMS INC.',
+   0x001517: u'Intel Corporate',
+   0x001518: u'Shenzhen 10MOONS Technology Development CO.,Ltd',
+   0x001519: u'StoreAge Networking Technologies',
+   0x00151A: u'Hunter Engineering Company',
+   0x00151B: u'Isilon Systems Inc.',
+   0x00151C: u'LENECO',
+   0x00151D: u'M2I CORPORATION',
+   0x00151E: u'Metaware Co., Ltd.',
+   0x00151F: u'Multivision Intelligent Surveillance (Hong Kong) Ltd',
+   0x001520: u'Radiocrafts AS',
+   0x001521: u'Horoquartz',
+   0x001522: u'Dea Security',
+   0x001523: u'Meteor Communications Corporation',
+   0x001524: u'Numatics, Inc.',
+   0x001525: u'PTI Integrated Systems, Inc.',
+   0x001526: u'Remote Technologies Inc',
+   0x001527: u'Balboa Instruments',
+   0x001528: u'Beacon Medical Products LLC d.b.a. BeaconMedaes',
+   0x001529: u'N3 Corporation',
+   0x00152A: u'Nokia GmbH',
+   0x00152B: u'Cisco Systems',
+   0x00152C: u'Cisco Systems',
+   0x00152D: u'TenX Networks, LLC',
+   0x00152E: u'PacketHop, Inc.',
+   0x00152F: u'Motorola CHS',
+   0x001530: u'Bus-Tech, Inc.',
+   0x001531: u'KOCOM',
+   0x001532: u'Consumer Technologies Group, LLC',
+   0x001533: u'NADAM.CO.,LTD',
+   0x001534: u'A BELTRÓNICA, Companhia de Comunicações, Lda',
+   0x001535: u'OTE Spa',
+   0x001536: u'Powertech co.,Ltd',
+   0x001537: u'Ventus Networks',
+   0x001538: u'RFID, Inc.',
+   0x001539: u'Technodrive SRL',
+   0x00153A: u'Shenzhen Syscan Technology Co.,Ltd.',
+   0x00153B: u'EMH Elektrizitätszähler GmbH & CoKG',
+   0x00153C: u'Kprotech Co., Ltd.',
+   0x00153D: u'ELIM PRODUCT CO.',
+   0x00153E: u'Q-Matic Sweden AB',
+   0x00153F: u'Alcatel Alenia Space Italia',
+   0x001540: u'Nortel',
+   0x001541: u'StrataLight Communications, Inc.',
+   0x001542: u'MICROHARD S.R.L.',
+   0x001543: u'Aberdeen Test Center',
+   0x001544: u'coM.s.a.t. AG',
+   0x001545: u'SEECODE Co., Ltd.',
+   0x001546: u'ITG Worldwide Sdn Bhd',
+   0x001547: u'AiZen Solutions Inc.',
+   0x001548: u'CUBE TECHNOLOGIES',
+   0x001549: u'Dixtal Biomedica Ind. Com. Ltda',
+   0x00154A: u'WANSHIH ELECTRONIC CO., LTD',
+   0x00154B: u'Wonde Proud Technology Co., Ltd',
+   0x00154C: u'Saunders Electronics',
+   0x00154D: u'Netronome Systems, Inc.',
+   0x00154E: u'Hirschmann Automation and Control GmbH',
+   0x00154F: u'one RF Technology',
+   0x001550: u'Nits Technology Inc',
+   0x001551: u'RadioPulse Inc.',
+   0x001552: u'Wi-Gear Inc.',
+   0x001553: u'Cytyc Corporation',
+   0x001554: u'Atalum Wireless S.A.',
+   0x001555: u'DFM GmbH',
+   0x001556: u'SAGEM SA',
+   0x001557: u'Olivetti',
+   0x001558: u'FOXCONN',
+   0x001559: u'Securaplane Technologies, Inc.',
+   0x00155A: u'DAINIPPON PHARMACEUTICAL CO., LTD.',
+   0x00155B: u'Sampo Corporation',
+   0x00155C: u'Dresser Wayne',
+   0x00155D: u'Microsoft Corporation',
+   0x00155E: u'Morgan Stanley',
+   0x00155F: u'Ubiwave',
+   0x001560: u'Hewlett Packard',
+   0x001561: u'JJPlus Corporation',
+   0x001562: u'Cisco Systems',
+   0x001563: u'Cisco Systems',
+   0x001564: u'BEHRINGER Spezielle Studiotechnik GmbH',
+   0x001565: u'XIAMEN YEALINK NETWORK TECHNOLOGY CO.,LTD',
+   0x001566: u'A-First Technology Co., Ltd.',
+   0x001567: u'RADWIN Inc.',
+   0x001568: u'Dilithium Networks',
+   0x001569: u'PECO II, Inc.',
+   0x00156A: u'DG2L Technologies Pvt. Ltd.',
+   0x00156B: u'Perfisans Networks Corp.',
+   0x00156C: u'SANE SYSTEM CO., LTD',
+   0x00156D: u'Ubiquiti Networks',
+   0x00156E: u'A. W. Communication Systems Ltd',
+   0x00156F: u'Xiranet Communications GmbH',
+   0x001570: u'Symbol Technologies',
+   0x001571: u'Nolan Systems',
+   0x001572: u'Red-Lemon',
+   0x001573: u'NewSoft  Technology Corporation',
+   0x001574: u'Horizon Semiconductors Ltd.',
+   0x001575: u'Nevis Networks Inc.',
+   0x001576: u'scil animal care company GmbH',
+   0x001577: u'Allied Telesyn, Inc.',
+   0x001578: u'Audio / Video Innovations',
+   0x001579: u'Lunatone Industrielle Elektronik GmbH',
+   0x00157A: u'Telefin S.p.A.',
+   0x00157B: u'Leuze electronic GmbH + Co. KG',
+   0x00157C: u'Dave Networks, Inc.',
+   0x00157D: u'POSDATA CO., LTD.',
+   0x00157E: u'HEYFRA ELECTRONIC gmbH',
+   0x00157F: u'ChuanG International Holding CO.,LTD.',
+   0x001580: u'U-WAY CORPORATION',
+   0x001581: u'MAKUS Inc.',
+   0x001582: u'TVonics Ltd',
+   0x001583: u'IVT corporation',
+   0x001584: u'Schenck Process GmbH',
+   0x001585: u'Aonvision Technolopy Corp.',
+   0x001586: u'Xiamen Overseas Chinese Electronic Co., Ltd.',
+   0x001587: u'Takenaka Seisakusho Co.,Ltd',
+   0x001588: u'Balda-Thong Fook Solutions Sdn. Bhd.',
+   0x001589: u'D-MAX Technology Co.,Ltd',
+   0x00158A: u'SURECOM Technology Corp.',
+   0x00158B: u'Park Air Systems Ltd',
+   0x00158C: u'Liab ApS',
+   0x00158D: u'Jennic Ltd',
+   0x00158E: u'Plustek.INC',
+   0x00158F: u'NTT Advanced Technology Corporation',
+   0x001590: u'Hectronic GmbH',
+   0x001591: u'RLW Inc.',
+   0x001592: u'Facom UK Ltd (Melksham)',
+   0x001593: u'U4EA Technologies Inc.',
+   0x001594: u'BIXOLON CO.,LTD',
+   0x001595: u'Quester Tangent Corporation',
+   0x001596: u'ARRIS International',
+   0x001597: u'AETA AUDIO SYSTEMS',
+   0x001598: u'Kolektor group',
+   0x001599: u'Samsung Electronics Co., LTD',
+   0x00159A: u'Motorola CHS',
+   0x00159B: u'Nortel',
+   0x00159C: u'B-KYUNG SYSTEM Co.,Ltd.',
+   0x00159D: u'Minicom Advanced Systems ltd',
+   0x00159E: u'Saitek plc',
+   0x00159F: u'Terascala, Inc.',
+   0x0015A0: u'Nokia Danmark A/S',
+   0x0015A1: u'SINTERS SAS',
+   0x0015A2: u'ARRIS International',
+   0x0015A3: u'ARRIS International',
+   0x0015A4: u'ARRIS International',
+   0x0015A5: u'DCI Co., Ltd.',
+   0x0015A6: u'Digital Electronics Products Ltd.',
+   0x0015A7: u'Robatech AG',
+   0x0015A8: u'Motorola Mobile Devices',
+   0x0015A9: u'KWANG WOO I&C CO.,LTD',
+   0x0015AA: u'Rextechnik International Co.,',
+   0x0015AB: u'PRO CO SOUND INC',
+   0x0015AC: u'Capelon AB',
+   0x0015AD: u'Accedian Networks',
+   0x0015AE: u'kyung il',
+   0x0015AF: u'AzureWave Technologies, Inc.',
+   0x0015B0: u'AUTOTELENET CO.,LTD',
+   0x0015B1: u'Ambient Corporation',
+   0x0015B2: u'Advanced Industrial Computer, Inc.',
+   0x0015B3: u'Caretech AB',
+   0x0015B4: u'Polymap  Wireless LLC',
+   0x0015B5: u'CI Network Corp.',
+   0x0015B6: u'ShinMaywa Industries, Ltd.',
+   0x0015B7: u'Toshiba',
+   0x0015B8: u'Tahoe',
+   0x0015B9: u'Samsung Electronics Co., Ltd.',
+   0x0015BA: u'iba AG',
+   0x0015BB: u'SMA Technologie AG',
+   0x0015BC: u'Develco',
+   0x0015BD: u'Group 4 Technology Ltd',
+   0x0015BE: u'Iqua Ltd.',
+   0x0015BF: u'technicob',
+   0x0015C0: u'DIGITAL TELEMEDIA CO.,LTD.',
+   0x0015C1: u'SONY Computer Entertainment inc,',
+   0x0015C2: u'3M Germany',
+   0x0015C3: u'Ruf Telematik AG',
+   0x0015C4: u'FLOVEL CO., LTD.',
+   0x0015C5: u'Dell Inc',
+   0x0015C6: u'Cisco Systems',
+   0x0015C7: u'Cisco Systems',
+   0x0015C8: u'FlexiPanel Ltd',
+   0x0015C9: u'Gumstix, Inc',
+   0x0015CA: u'TeraRecon, Inc.',
+   0x0015CB: u'Surf Communication Solutions Ltd.',
+   0x0015CC: u'TEPCO UQUEST, LTD.',
+   0x0015CD: u'Exartech International Corp.',
+   0x0015CE: u'ARRIS International',
+   0x0015CF: u'ARRIS International',
+   0x0015D0: u'ARRIS International',
+   0x0015D1: u'ARRIS International',
+   0x0015D2: u'Xantech Corporation',
+   0x0015D3: u'Pantech&Curitel Communications, Inc.',
+   0x0015D4: u'Emitor AB',
+   0x0015D5: u'NICEVT',
+   0x0015D6: u'OSLiNK Sp. z o.o.',
+   0x0015D7: u'Reti Corporation',
+   0x0015D8: u'Interlink Electronics',
+   0x0015D9: u'PKC Electronics Oy',
+   0x0015DA: u'IRITEL A.D.',
+   0x0015DB: u'Canesta Inc.',
+   0x0015DC: u'KT&C Co., Ltd.',
+   0x0015DD: u'IP Control Systems Ltd.',
+   0x0015DE: u'Nokia Danmark A/S',
+   0x0015DF: u'Clivet S.p.A.',
+   0x0015E0: u'Ericsson Mobile Platforms',
+   0x0015E1: u'picoChip Designs Ltd',
+   0x0015E2: u'Wissenschaftliche Geraetebau Dr. Ing. H. Knauer GmbH',
+   0x0015E3: u'Dream Technologies Corporation',
+   0x0015E4: u'Zimmer Elektromedizin',
+   0x0015E5: u'Cheertek Inc.',
+   0x0015E6: u'MOBILE TECHNIKA Inc.',
+   0x0015E7: u'Quantec ProAudio',
+   0x0015E8: u'Nortel',
+   0x0015E9: u'D-Link Corporation',
+   0x0015EA: u'Tellumat (Pty) Ltd',
+   0x0015EB: u'ZTE CORPORATION',
+   0x0015EC: u'Boca Devices LLC',
+   0x0015ED: u'Fulcrum Microsystems, Inc.',
+   0x0015EE: u'Omnex Control Systems',
+   0x0015EF: u'NEC TOKIN Corporation',
+   0x0015F0: u'EGO BV',
+   0x0015F1: u'KYLINK Communications Corp.',
+   0x0015F2: u'ASUSTek COMPUTER INC.',
+   0x0015F3: u'PELTOR AB',
+   0x0015F4: u'Eventide',
+   0x0015F5: u'Sustainable Energy Systems',
+   0x0015F6: u'SCIENCE AND ENGINEERING SERVICES, INC.',
+   0x0015F7: u'Wintecronics Ltd.',
+   0x0015F8: u'Kingtronics Industrial Co. Ltd.',
+   0x0015F9: u'Cisco Systems',
+   0x0015FA: u'Cisco Systems',
+   0x0015FB: u'setex schermuly textile computer gmbh',
+   0x0015FC: u'Startco Engineering Ltd.',
+   0x0015FD: u'Complete Media Systems',
+   0x0015FE: u'SCHILLING ROBOTICS LLC',
+   0x0015FF: u'Novatel Wireless, Inc.',
+   0x001600: u'CelleBrite Mobile Synchronization',
+   0x001601: u'Buffalo Inc.',
+   0x001602: u'CEYON TECHNOLOGY CO.,LTD.',
+   0x001603: u'PRIVATE',
+   0x001604: u'Sigpro',
+   0x001605: u'YORKVILLE SOUND INC.',
+   0x001606: u'Ideal Industries',
+   0x001607: u'Curves International Inc.',
+   0x001608: u'Sequans Communications',
+   0x001609: u'Unitech electronics co., ltd.',
+   0x00160A: u'SWEEX Europe BV',
+   0x00160B: u'TVWorks LLC',
+   0x00160C: u'LPL  DEVELOPMENT S.A. DE C.V',
+   0x00160D: u'Be Here Corporation',
+   0x00160E: u'Optica Technologies Inc.',
+   0x00160F: u'BADGER METER INC',
+   0x001610: u'Carina Technology',
+   0x001611: u'Altecon Srl',
+   0x001612: u'Otsuka Electronics Co., Ltd.',
+   0x001613: u'LibreStream Technologies Inc.',
+   0x001614: u'Picosecond Pulse Labs',
+   0x001615: u'Nittan Company, Limited',
+   0x001616: u'BROWAN COMMUNICATION INC.',
+   0x001617: u'MSI',
+   0x001618: u'HIVION Co., Ltd.',
+   0x001619: u'La Factoría de Comunicaciones Aplicadas,S.L.',
+   0x00161A: u'Dametric AB',
+   0x00161B: u'Micronet Corporation',
+   0x00161C: u'e:cue',
+   0x00161D: u'Innovative Wireless Technologies, Inc.',
+   0x00161E: u'Woojinnet',
+   0x00161F: u'SUNWAVETEC Co., Ltd.',
+   0x001620: u'Sony Ericsson Mobile Communications AB',
+   0x001621: u'Colorado Vnet',
+   0x001622: u'BBH SYSTEMS GMBH',
+   0x001623: u'Interval Media',
+   0x001624: u'PRIVATE',
+   0x001625: u'Impinj, Inc.',
+   0x001626: u'Motorola CHS',
+   0x001627: u'embedded-logic DESIGN AND MORE GmbH',
+   0x001628: u'Ultra Electronics Manufacturing and Card Systems',
+   0x001629: u'Nivus GmbH',
+   0x00162A: u'Antik computers & communications s.r.o.',
+   0x00162B: u'Togami Electric Mfg.co.,Ltd.',
+   0x00162C: u'Xanboo',
+   0x00162D: u'STNet Co., Ltd.',
+   0x00162E: u'Space Shuttle Hi-Tech Co., Ltd.',
+   0x00162F: u'Geutebrück GmbH',
+   0x001630: u'Vativ Technologies',
+   0x001631: u'Xteam',
+   0x001632: u'SAMSUNG ELECTRONICS CO., LTD.',
+   0x001633: u'Oxford Diagnostics Ltd.',
+   0x001634: u'Mathtech, Inc.',
+   0x001635: u'Hewlett Packard',
+   0x001636: u'Quanta Computer Inc.',
+   0x001637: u'Citel Srl',
+   0x001638: u'TECOM Co., Ltd.',
+   0x001639: u'UBIQUAM Co.,Ltd',
+   0x00163A: u'YVES TECHNOLOGY CO., LTD.',
+   0x00163B: u'VertexRSI/General Dynamics',
+   0x00163C: u'Rebox B.V.',
+   0x00163D: u'Tsinghua Tongfang Legend Silicon Tech. Co., Ltd.',
+   0x00163E: u'Xensource, Inc.',
+   0x00163F: u'CReTE SYSTEMS Inc.',
+   0x001640: u'Asmobile Communication Inc.',
+   0x001641: u'USI',
+   0x001642: u'Pangolin',
+   0x001643: u'Sunhillo Corproation',
+   0x001644: u'LITE-ON Technology Corp.',
+   0x001645: u'Power Distribution, Inc.',
+   0x001646: u'Cisco Systems',
+   0x001647: u'Cisco Systems',
+   0x001648: u'SSD Company Limited',
+   0x001649: u'SetOne GmbH',
+   0x00164A: u'Vibration Technology Limited',
+   0x00164B: u'Quorion Data Systems GmbH',
+   0x00164C: u'PLANET INT Co., Ltd',
+   0x00164D: u'Alcatel North America IP Division',
+   0x00164E: u'Nokia Danmark A/S',
+   0x00164F: u'World Ethnic Broadcastin Inc.',
+   0x001650: u'EYAL MICROWAVE',
+   0x001651: u'PRIVATE',
+   0x001652: u'Hoatech Technologies, Inc.',
+   0x001653: u'LEGO System A/S IE Electronics Division',
+   0x001654: u'Flex-P Industries Sdn. Bhd.',
+   0x001655: u'FUHO TECHNOLOGY Co., LTD',
+   0x001656: u'Nintendo Co., Ltd.',
+   0x001657: u'Aegate Ltd',
+   0x001658: u'Fusiontech Technologies Inc.',
+   0x001659: u'Z.M.P. RADWAG',
+   0x00165A: u'Harman Specialty Group',
+   0x00165B: u'Grip Audio',
+   0x00165C: u'Trackflow Ltd',
+   0x00165D: u'AirDefense, Inc.',
+   0x00165E: u'Precision I/O',
+   0x00165F: u'Fairmount Automation',
+   0x001660: u'Nortel',
+   0x001661: u'Novatium Solutions (P) Ltd',
+   0x001662: u'Liyuh Technology Ltd.',
+   0x001663: u'KBT Mobile',
+   0x001664: u'Prod-El SpA',
+   0x001665: u'Cellon France',
+   0x001666: u'Quantier Communication Inc.',
+   0x001667: u'A-TEC Subsystem INC.',
+   0x001668: u'Eishin Electronics',
+   0x001669: u'MRV Communication (Networks) LTD',
+   0x00166A: u'TPS',
+   0x00166B: u'Samsung Electronics',
+   0x00166C: u'Samsung Electonics Digital Video System Division',
+   0x00166D: u'Yulong Computer Telecommunication Scientific(shenzhen)Co.,Lt',
+   0x00166E: u'Arbitron Inc.',
+   0x00166F: u'Intel Corporation',
+   0x001670: u'SKNET Corporation',
+   0x001671: u'Symphox Information Co.',
+   0x001672: u'Zenway enterprise ltd',
+   0x001673: u'PRIVATE',
+   0x001674: u'EuroCB (Phils.), Inc.',
+   0x001675: u'Motorola MDb',
+   0x001676: u'Intel Corporation',
+   0x001677: u'Bihl+Wiedemann GmbH',
+   0x001678: u'SHENZHEN BAOAN GAOKE ELECTRONICS CO., LTD',
+   0x001679: u'eOn Communications',
+   0x00167A: u'Skyworth Overseas Dvelopment Ltd.',
+   0x00167B: u'Haver&Boecker',
+   0x00167C: u'iRex Technologies BV',
+   0x00167D: u'Sky-Line',
+   0x00167E: u'DIBOSS.CO.,LTD',
+   0x00167F: u'Bluebird Soft Inc.',
+   0x001680: u'Bally Gaming + Systems',
+   0x001681: u'Vector Informatik GmbH',
+   0x001682: u'Pro Dex, Inc',
+   0x001683: u'WEBIO International Co.,.Ltd.',
+   0x001684: u'Donjin Co.,Ltd.',
+   0x001685: u'FRWD Technologies Ltd.',
+   0x001686: u'Karl Storz Imaging',
+   0x001687: u'Chubb CSC-Vendor AP',
+   0x001688: u'ServerEngines LLC',
+   0x001689: u'Pilkor Electronics Co., Ltd',
+   0x00168A: u'id-Confirm Inc',
+   0x00168B: u'Paralan Corporation',
+   0x00168C: u'DSL Partner AS',
+   0x00168D: u'KORWIN CO., Ltd.',
+   0x00168E: u'Vimicro corporation',
+   0x00168F: u'GN Netcom as',
+   0x001690: u'J-TEK INCORPORATION',
+   0x001691: u'Moser-Baer AG',
+   0x001692: u'Scientific-Atlanta, Inc.',
+   0x001693: u'PowerLink Technology Inc.',
+   0x001694: u'Sennheiser Communications A/S',
+   0x001695: u'AVC Technology Limited',
+   0x001696: u'QDI Technology (H.K.) Limited',
+   0x001697: u'NEC Corporation',
+   0x001698: u'T&A Mobile Phones SAS',
+   0x001699: u'PRIVATE',
+   0x00169A: u'Quadrics Ltd',
+   0x00169B: u'Alstom Transport',
+   0x00169C: u'Cisco Systems',
+   0x00169D: u'Cisco Systems',
+   0x00169E: u'TV One Ltd',
+   0x00169F: u'Vimtron Electronics Co., Ltd.',
+   0x0016A0: u'Auto-Maskin',
+   0x0016A1: u'3Leaf Networks',
+   0x0016A2: u'CentraLite Systems, Inc.',
+   0x0016A3: u'TEAM ARTECHE, S.A.',
+   0x0016A4: u'Ezurio Ltd',
+   0x0016A5: u'Tandberg Storage ASA',
+   0x0016A6: u'Dovado FZ-LLC',
+   0x0016A7: u'AWETA G&P',
+   0x0016A8: u'CWT CO., LTD.',
+   0x0016A9: u'2EI',
+   0x0016AA: u'Kei Communication Technology Inc.',
+   0x0016AB: u'PBI-Dansensor A/S',
+   0x0016AC: u'Toho Technology Corp.',
+   0x0016AD: u'BT-Links Company Limited',
+   0x0016AE: u'INVENTEL',
+   0x0016AF: u'Shenzhen Union Networks Equipment Co.,Ltd.',
+   0x0016B0: u'VK Corporation',
+   0x0016B1: u'KBS',
+   0x0016B2: u'DriveCam Inc',
+   0x0016B3: u'Photonicbridges (China) Co., Ltd.',
+   0x0016B4: u'PRIVATE',
+   0x0016B5: u'Motorola CHS',
+   0x0016B6: u'Cisco-Linksys',
+   0x0016B7: u'Seoul Commtech',
+   0x0016B8: u'Sony Ericsson Mobile Communications',
+   0x0016B9: u'ProCurve Networking',
+   0x0016BA: u'WEATHERNEWS INC.',
+   0x0016BB: u'Law-Chain Computer Technology Co Ltd',
+   0x0016BC: u'Nokia Danmark A/S',
+   0x0016BD: u'ATI Industrial Automation',
+   0x0016BE: u'INFRANET, Inc.',
+   0x0016BF: u'PaloDEx Group Oy',
+   0x0016C0: u'Semtech Corporation',
+   0x0016C1: u'Eleksen Ltd',
+   0x0016C2: u'Avtec Systems Inc',
+   0x0016C3: u'BA Systems Inc',
+   0x0016C4: u'SiRF Technology, Inc.',
+   0x0016C5: u'Shenzhen Xing Feng Industry Co.,Ltd',
+   0x0016C6: u'North Atlantic Industries',
+   0x0016C7: u'Cisco Systems',
+   0x0016C8: u'Cisco Systems',
+   0x0016C9: u'NAT Seattle, Inc.',
+   0x0016CA: u'Nortel',
+   0x0016CB: u'Apple Computer',
+   0x0016CC: u'Xcute Mobile Corp.',
+   0x0016CD: u'HIJI HIGH-TECH CO., LTD.',
+   0x0016CE: u'Hon Hai Precision Ind. Co., Ltd.',
+   0x0016CF: u'Hon Hai Precision Ind. Co., Ltd.',
+   0x0016D0: u'ATech elektronika d.o.o.',
+   0x0016D1: u'ZAT a.s.',
+   0x0016D2: u'Caspian',
+   0x0016D3: u'Wistron Corporation',
+   0x0016D4: u'Compal Communications, Inc.',
+   0x0016D5: u'Synccom Co., Ltd',
+   0x0016D6: u'TDA Tech Pty Ltd',
+   0x0016D7: u'Sunways AG',
+   0x0016D8: u'Senea AB',
+   0x0016D9: u'NINGBO BIRD CO.,LTD.',
+   0x0016DA: u'Futronic Technology Co. Ltd.',
+   0x0016DB: u'Samsung Electronics Co., Ltd.',
+   0x0016DC: u'ARCHOS',
+   0x0016DD: u'Gigabeam Corporation',
+   0x0016DE: u'FAST Inc',
+   0x0016DF: u'Lundinova AB',
+   0x0016E0: u'3Com Europe Ltd',
+   0x0016E1: u'SiliconStor, Inc.',
+   0x0016E2: u'American Fibertek, Inc.',
+   0x0016E3: u'ASKEY COMPUTER CORP.',
+   0x0016E4: u'VANGUARD SECURITY ENGINEERING CORP.',
+   0x0016E5: u'FORDLEY DEVELOPMENT LIMITED',
+   0x0016E6: u'GIGA-BYTE TECHNOLOGY CO.,LTD.',
+   0x0016E7: u'Dynamix Promotions Limited',
+   0x0016E8: u'Sigma Designs, Inc.',
+   0x0016E9: u'Tiba Medical Inc',
+   0x0016EA: u'Intel Corporation',
+   0x0016EB: u'Intel Corporation',
+   0x0016EC: u'Elitegroup Computer Systems Co., Ltd.',
+   0x0016ED: u'Integrian, Inc.',
+   0x0016EE: u'RoyalDigital Inc.',
+   0x0016EF: u'Koko Fitness, Inc.',
+   0x0016F0: u'Zermatt Systems, Inc',
+   0x0016F1: u'OmniSense, LLC',
+   0x0016F2: u'Dmobile System Co., Ltd.',
+   0x0016F3: u'CAST Information Co., Ltd',
+   0x0016F4: u'Eidicom Co., Ltd.',
+   0x0016F5: u'Dalian Golden Hualu Digital Technology Co.,Ltd',
+   0x0016F6: u'Video Products Group',
+   0x0016F7: u'L-3 Communications, Electrodynamics, Inc.',
+   0x0016F8: u'AVIQTECH TECHNOLOGY CO., LTD.',
+   0x0016F9: u'CETRTA POT, d.o.o., Kranj',
+   0x0016FA: u'ECI Telecom Ltd.',
+   0x0016FB: u'SHENZHEN MTC CO.,LTD.',
+   0x0016FC: u'TOHKEN CO.,LTD.',
+   0x0016FD: u'Jaty Electronics',
+   0x0016FE: u'Alps Electric Co., Ltd',
+   0x0016FF: u'Wamin Optocomm Mfg Corp',
+   0x001700: u'Motorola MDb',
+   0x001701: u'KDE, Inc.',
+   0x001702: u'Osung Midicom Co., Ltd',
+   0x001703: u'MOSDAN Internation Co.,Ltd',
+   0x001704: u'Shinco Electronics Group Co.,Ltd',
+   0x001705: u'Methode Electronics',
+   0x001706: u'Techfaith Wireless Communication Technology Limited.',
+   0x001707: u'InGrid, Inc',
+   0x001708: u'Hewlett Packard',
+   0x001709: u'Exalt Communications',
+   0x00170A: u'INEW DIGITAL COMPANY',
+   0x00170B: u'Contela, Inc.',
+   0x00170C: u'Benefon Oyj',
+   0x00170D: u'Dust Networks Inc.',
+   0x00170E: u'Cisco Systems',
+   0x00170F: u'Cisco Systems',
+   0x001710: u'Casa Systems Inc.',
+   0x001711: u'GE Healthcare Bio-Sciences AB',
+   0x001712: u'ISCO International',
+   0x001713: u'Tiger NetCom',
+   0x001714: u'BR Controls Nederland bv',
+   0x001715: u'Qstik',
+   0x001716: u'Qno Technology Inc.',
+   0x001717: u'Leica Geosystems AG',
+   0x001718: u'Vansco Electronics Oy',
+   0x001719: u'AudioCodes USA, Inc',
+   0x00171A: u'Winegard Company',
+   0x00171B: u'Innovation Lab Corp.',
+   0x00171C: u'NT MicroSystems, Inc.',
+   0x00171D: u'DIGIT',
+   0x00171E: u'Theo Benning GmbH & Co. KG',
+   0x00171F: u'IMV Corporation',
+   0x001720: u'Image Sensing Systems, Inc.',
+   0x001721: u'FITRE S.p.A.',
+   0x001722: u'Hanazeder Electronic GmbH',
+   0x001723: u'Summit Data Communications',
+   0x001724: u'Studer Professional Audio GmbH',
+   0x001725: u'Liquid Computing',
+   0x001726: u'm2c Electronic Technology Ltd.',
+   0x001727: u'Thermo Ramsey Italia s.r.l.',
+   0x001728: u'Selex Communications',
+   0x001729: u'Ubicod Co.LTD',
+   0x00172A: u'Proware Technology Corp.',
+   0x00172B: u'Global Technologies Inc.',
+   0x00172C: u'TAEJIN INFOTECH',
+   0x00172D: u'Axcen Photonics Corporation',
+   0x00172E: u'FXC Inc.',
+   0x00172F: u'NeuLion Incorporated',
+   0x001730: u'Automation Electronics',
+   0x001731: u'ASUSTek COMPUTER INC.',
+   0x001732: u'Science-Technical Center "RISSA"',
+   0x001733: u'neuf cegetel',
+   0x001734: u'LGC Wireless Inc.',
+   0x001735: u'PRIVATE',
+   0x001736: u'iiTron Inc.',
+   0x001737: u'Industrie Dial Face S.p.A.',
+   0x001738: u'XIV',
+   0x001739: u'Bright Headphone Electronics Company',
+   0x00173A: u'Edge Integration Systems Inc.',
+   0x00173B: u'Arched Rock Corporation',
+   0x00173C: u'Extreme Engineering Solutions',
+   0x00173D: u'Neology',
+   0x00173E: u'LeucotronEquipamentos Ltda.',
+   0x00173F: u'Belkin Corporation',
+   0x001740: u'Technologies Labtronix',
+   0x001741: u'DEFIDEV',
+   0x001742: u'FUJITSU LIMITED',
+   0x001743: u'Deck Srl',
+   0x001744: u'Araneo Ltd.',
+   0x001745: u'INNOTZ CO., Ltd',
+   0x001746: u'Freedom9 Inc.',
+   0x001747: u'Trimble',
+   0x001748: u'Neokoros Brasil Ltda',
+   0x001749: u'HYUNDAE YONG-O-SA CO.,LTD',
+   0x00174A: u'SOCOMEC',
+   0x00174B: u'Nokia Danmark A/S',
+   0x00174C: u'Millipore',
+   0x00174D: u'DYNAMIC NETWORK FACTORY, INC.',
+   0x00174E: u'Parama-tech Co.,Ltd.',
+   0x00174F: u'iCatch Inc.',
+   0x001750: u'GSI Group, MicroE Systems',
+   0x001751: u'Online Corporation',
+   0x001752: u'DAGS, Inc',
+   0x001753: u'nFore Technology Inc.',
+   0x001754: u'Arkino Corporation., Ltd',
+   0x001755: u'GE Security',
+   0x001756: u'Vinci Labs Oy',
+   0x001757: u'RIX TECHNOLOGY LIMITED',
+   0x001758: u'ThruVision Ltd',
+   0x001759: u'Cisco Systems',
+   0x00175A: u'Cisco Systems',
+   0x00175B: u'ACS Solutions Switzerland Ltd.',
+   0x00175C: u'SHARP CORPORATION',
+   0x00175D: u'Dongseo system.',
+   0x00175E: u'Anta Systems, Inc.',
+   0x00175F: u'XENOLINK Communications Co., Ltd.',
+   0x001760: u'Naito Densei Machida MFG.CO.,LTD',
+   0x001761: u'ZKSoftware Inc.',
+   0x001762: u'Solar Technology, Inc.',
+   0x001763: u'Essentia S.p.A.',
+   0x001764: u'ATMedia GmbH',
+   0x001765: u'Nortel',
+   0x001766: u'Accense Technology, Inc.',
+   0x001767: u'Earforce AS',
+   0x001768: u'Zinwave Ltd',
+   0x001769: u'Cymphonix Corp',
+   0x00176A: u'Avago Technologies',
+   0x00176B: u'Kiyon, Inc.',
+   0x00176C: u'Pivot3, Inc.',
+   0x00176D: u'CORE CORPORATION',
+   0x00176E: u'DUCATI SISTEMI',
+   0x00176F: u'PAX Computer Technology(Shenzhen) Ltd.',
+   0x001770: u'Arti Industrial Electronics Ltd.',
+   0x001771: u'APD Communications Ltd',
+   0x001772: u'ASTRO Strobel Kommunikationssysteme GmbH',
+   0x001773: u'Laketune Technologies Co. Ltd',
+   0x001774: u'Elesta GmbH',
+   0x001775: u'TTE Germany GmbH',
+   0x001776: u'Meso Scale Diagnostics, LLC',
+   0x001777: u'Obsidian Research Corporation',
+   0x001778: u'Central Music Co.',
+   0x001779: u'QuickTel',
+   0x00177A: u'ASSA ABLOY AB',
+   0x00177B: u'Azalea Networks inc',
+   0x00177C: u'D-Link India Ltd',
+   0x00177D: u'IDT International Limited',
+   0x00177E: u'Meshcom Technologies Inc.',
+   0x00177F: u'Worldsmart Retech',
+   0x001780: u'Applera Holding B.V. Singapore Operations',
+   0x001781: u'Greystone Data System, Inc.',
+   0x001782: u'LoBenn Inc.',
+   0x001783: u'Texas Instruments',
+   0x001784: u'Motorola Mobile Devices',
+   0x001785: u'Sparr Electronics Ltd',
+   0x001786: u'wisembed',
+   0x001787: u'Brother, Brother & Sons ApS',
+   0x001788: u'Philips Lighting BV',
+   0x001789: u'Zenitron Corporation',
+   0x00178A: u'DARTS TECHNOLOGIES CORP.',
+   0x00178B: u'Teledyne Technologies Incorporated',
+   0x00178C: u'Independent Witness, Inc',
+   0x00178D: u'Checkpoint Systems, Inc.',
+   0x00178E: u'Gunnebo Cash Automation AB',
+   0x00178F: u'NINGBO YIDONG ELECTRONIC CO.,LTD.',
+   0x001790: u'HYUNDAI DIGITECH Co, Ltd.',
+   0x001791: u'LinTech GmbH',
+   0x001792: u'Falcom Wireless Comunications Gmbh',
+   0x001793: u'Tigi Corporation',
+   0x001794: u'Cisco Systems',
+   0x001795: u'Cisco Systems',
+   0x001796: u'Rittmeyer AG',
+   0x001797: u'Telsy Elettronica S.p.A.',
+   0x001798: u'Azonic Technology Co., LTD',
+   0x001799: u'SmarTire Systems Inc.',
+   0x00179A: u'D-Link Corporation',
+   0x00179B: u'Chant Sincere CO., LTD.',
+   0x00179C: u'DEPRAG SCHULZ GMBH u. CO.',
+   0x00179D: u'Kelman Limited',
+   0x00179E: u'Sirit Inc',
+   0x00179F: u'Apricorn',
+   0x0017A0: u'RoboTech srl',
+   0x0017A1: u'3soft inc.',
+   0x0017A2: u'Camrivox Ltd.',
+   0x0017A3: u'MIX s.r.l.',
+   0x0017A4: u'Global Data Services',
+   0x0017A5: u'TrendChip Technologies Corp.',
+   0x0017A6: u'YOSIN ELECTRONICS CO., LTD.',
+   0x0017A7: u'Mobile Computing Promotion Consortium',
+   0x0017A8: u'EDM Corporation',
+   0x0017A9: u'Sentivision',
+   0x0017AA: u'elab-experience inc.',
+   0x0017AB: u'Nintendo Co., Ltd.',
+   0x0017AC: u'O\'Neil Product Development Inc.',
+   0x0017AD: u'AceNet Corporation',
+   0x0017AE: u'GAI-Tronics',
+   0x0017AF: u'Enermet',
+   0x0017B0: u'Nokia Danmark A/S',
+   0x0017B1: u'ACIST Medical Systems, Inc.',
+   0x0017B2: u'SK Telesys',
+   0x0017B3: u'Aftek Infosys Limited',
+   0x0017B4: u'Remote Security Systems, LLC',
+   0x0017B5: u'Peerless Systems Corporation',
+   0x0017B6: u'Aquantia',
+   0x0017B7: u'Tonze Technology Co.',
+   0x0017B8: u'NOVATRON CO., LTD.',
+   0x0017B9: u'Gambro Lundia AB',
+   0x0017BA: u'SEDO CO., LTD.',
+   0x0017BB: u'Syrinx Industrial Electronics',
+   0x0017BC: u'Touchtunes Music Corporation',
+   0x0017BD: u'Tibetsystem',
+   0x0017BE: u'Tratec Telecom B.V.',
+   0x0017BF: u'Coherent Research Limited',
+   0x0017C0: u'PureTech Systems, Inc.',
+   0x0017C1: u'CM Precision Technology LTD.',
+   0x0017C2: u'Pirelli Broadband Solutions',
+   0x0017C3: u'KTF Technologies Inc.',
+   0x0017C4: u'Quanta Microsystems, INC.',
+   0x0017C5: u'SonicWALL',
+   0x0017C6: u'Labcal Technologies',
+   0x0017C7: u'MARA Systems Consulting AB',
+   0x0017C8: u'Kyocera Mita Corporation',
+   0x0017C9: u'Samsung Electronics Co., Ltd.',
+   0x0017CA: u'BenQ Corporation',
+   0x0017CB: u'Juniper Networks',
+   0x0017CC: u'Alcatel USA Sourcing LP',
+   0x0017CD: u'CEC Wireless R&D Ltd.',
+   0x0017CE: u'MB International Telecom Labs srl',
+   0x0017CF: u'iMCA-GmbH',
+   0x0017D0: u'Opticom Communications, LLC',
+   0x0017D1: u'Nortel',
+   0x0017D2: u'THINLINX PTY LTD',
+   0x0017D3: u'Etymotic Research, Inc.',
+   0x0017D4: u'Monsoon Multimedia, Inc',
+   0x0017D5: u'Samsung Electronics Co., Ltd.',
+   0x0017D6: u'Bluechips Microhouse Co.,Ltd.',
+   0x0017D7: u'Input/Output Inc.',
+   0x0017D8: u'Magnum Semiconductor, Inc.',
+   0x0017D9: u'AAI Corporation',
+   0x0017DA: u'Spans Logic',
+   0x0017DB: u'PRIVATE',
+   0x0017DC: u'DAEMYUNG ZERO1',
+   0x0017DD: u'Clipsal Australia',
+   0x0017DE: u'Advantage Six Ltd',
+   0x0017DF: u'Cisco Systems',
+   0x0017E0: u'Cisco Systems',
+   0x0017E1: u'DACOS Technologies Co., Ltd.',
+   0x0017E2: u'Motorola Mobile Devices',
+   0x0017E3: u'Texas Instruments',
+   0x0017E4: u'Texas Instruments',
+   0x0017E5: u'Texas Instruments',
+   0x0017E6: u'Texas Instruments',
+   0x0017E7: u'Texas Instruments',
+   0x0017E8: u'Texas Instruments',
+   0x0017E9: u'Texas Instruments',
+   0x0017EA: u'Texas Instruments',
+   0x0017EB: u'Texas Instruments',
+   0x0017EC: u'Texas Instruments',
+   0x0017ED: u'WooJooIT Ltd.',
+   0x0017EE: u'Motorola CHS',
+   0x0017EF: u'Blade Network Technologies, Inc.',
+   0x0017F0: u'SZCOM Broadband Network Technology Co.,Ltd',
+   0x0017F1: u'Renu Electronics Pvt Ltd',
+   0x0017F2: u'Apple Computer',
+   0x0017F3: u'M/A-COM Wireless Systems',
+   0x0017F4: u'ZERON ALLIANCE',
+   0x0017F5: u'NEOPTEK',
+   0x0017F6: u'Pyramid Meriden Inc.',
+   0x0017F7: u'CEM Solutions Pvt Ltd',
+   0x0017F8: u'Motech Industries Inc.',
+   0x0017F9: u'Forcom Sp. z o.o.',
+   0x0017FA: u'Microsoft Corporation',
+   0x0017FB: u'FA',
+   0x0017FC: u'Suprema Inc.',
+   0x0017FD: u'Amulet Hotkey',
+   0x0017FE: u'TALOS SYSTEM INC.',
+   0x0017FF: u'PLAYLINE Co.,Ltd.',
+   0x001800: u'UNIGRAND LTD',
+   0x001801: u'Actiontec Electronics, Inc',
+   0x001802: u'Alpha Networks Inc.',
+   0x001803: u'ArcSoft Shanghai Co. LTD',
+   0x001804: u'E-TEK DIGITAL TECHNOLOGY LIMITED',
+   0x001805: u'Beijing InHand Networking',
+   0x001806: u'Hokkei Industries Co., Ltd.',
+   0x001807: u'Fanstel Corp.',
+   0x001808: u'SightLogix, Inc.',
+   0x001809: u'CRESYN',
+   0x00180A: u'Meraki Networks, Inc.',
+   0x00180B: u'Brilliant Telecommunications',
+   0x00180C: u'Optelian Access Networks Corporation',
+   0x00180D: u'Terabytes Server Storage Tech Corp',
+   0x00180E: u'Avega Systems',
+   0x00180F: u'Nokia Danmark A/S',
+   0x001810: u'IPTrade S.A.',
+   0x001811: u'Neuros Technology International, LLC.',
+   0x001812: u'Beijing Xinwei Telecom Technology Co., Ltd.',
+   0x001813: u'Sony Ericsson Mobile Communications',
+   0x001814: u'Mitutoyo Corporation',
+   0x001815: u'GZ Technologies, Inc.',
+   0x001816: u'Ubixon Co., Ltd.',
+   0x001817: u'D. E. Shaw Research, LLC',
+   0x001818: u'Cisco Systems',
+   0x001819: u'Cisco Systems',
+   0x00181A: u'AVerMedia Technologies Inc.',
+   0x00181B: u'TaiJin Metal Co., Ltd.',
+   0x00181C: u'Exterity Limited',
+   0x00181D: u'ASIA ELECTRONICS CO.,LTD',
+   0x00181E: u'GDX Technologies Ltd.',
+   0x00181F: u'Palmmicro Communications',
+   0x001820: u'w5networks',
+   0x001821: u'SINDORICOH',
+   0x001822: u'CEC TELECOM CO.,LTD.',
+   0x001823: u'Delta Electronics, Inc.',
+   0x001824: u'Kimaldi Electronics, S.L.',
+   0x001825: u'Wavion LTD',
+   0x001826: u'Cale Access AB',
+   0x001827: u'NEC PHILIPS UNIFIED SOLUTIONS NEDERLAND BV',
+   0x001828: u'e2v technologies (UK) ltd.',
+   0x001829: u'Gatsometer',
+   0x00182A: u'Taiwan Video & Monitor',
+   0x00182B: u'Softier',
+   0x00182C: u'Ascend Networks, Inc.',
+   0x00182D: u'Artec Group OÜ',
+   0x00182E: u'Wireless Ventures USA',
+   0x00182F: u'Texas Instruments',
+   0x001830: u'Texas Instruments',
+   0x001831: u'Texas Instruments',
+   0x001832: u'Texas Instruments',
+   0x001833: u'Texas Instruments',
+   0x001834: u'Texas Instruments',
+   0x001835: u'ITC',
+   0x001836: u'Reliance Electric Limited',
+   0x001837: u'Universal ABIT Co., Ltd.',
+   0x001838: u'PanAccess Communications,Inc.',
+   0x001839: u'Cisco-Linksys LLC',
+   0x00183A: u'Westell Technologies',
+   0x00183B: u'CENITS Co., Ltd.',
+   0x00183C: u'Encore Software Limited',
+   0x00183D: u'Vertex Link Corporation',
+   0x00183E: u'Digilent, Inc',
+   0x00183F: u'2Wire, Inc',
+   0x001840: u'3 Phoenix, Inc.',
+   0x001841: u'High Tech Computer Corp',
+   0x001842: u'Nokia Danmark A/S',
+   0x001843: u'Dawevision Ltd',
+   0x001844: u'Heads Up Technologies, Inc.',
+   0x001845: u'NPL Pulsar Ltd.',
+   0x001846: u'Crypto S.A.',
+   0x001847: u'AceNet Technology Inc.',
+   0x001848: u'Vecima Networks Inc.',
+   0x001849: u'Pigeon Point Systems',
+   0x00184A: u'Catcher, Inc.',
+   0x00184B: u'Las Vegas Gaming, Inc.',
+   0x00184C: u'Bogen Communications',
+   0x00184D: u'Netgear Inc.',
+   0x00184E: u'Lianhe Technologies, Inc.',
+   0x00184F: u'8 Ways Technology Corp.',
+   0x001850: u'Secfone Kft',
+   0x001851: u'SWsoft',
+   0x001852: u'StorLink Semiconductors, Inc.',
+   0x001853: u'Atera Networks LTD.',
+   0x001854: u'Argard Co., Ltd',
+   0x001855: u'Aeromaritime Systembau GmbH',
+   0x001856: u'EyeFi, Inc',
+   0x001857: u'Unilever R&D',
+   0x001858: u'TagMaster AB',
+   0x001859: u'Strawberry Linux Co.,Ltd.',
+   0x00185A: u'uControl, Inc.',
+   0x00185B: u'Network Chemistry, Inc',
+   0x00185C: u'EDS Lab Pte Ltd',
+   0x00185D: u'TAIGUEN TECHNOLOGY (SHEN-ZHEN) CO., LTD.',
+   0x00185E: u'Nexterm Inc.',
+   0x00185F: u'TAC Inc.',
+   0x001860: u'SIM Technology Group Shanghai Simcom Ltd.,',
+   0x001861: u'Ooma, Inc.',
+   0x001862: u'Seagate Technology',
+   0x001863: u'Veritech Electronics Limited',
+   0x001864: u'Cybectec Inc.',
+   0x001865: u'Bayer Diagnostics Sudbury Ltd',
+   0x001866: u'Leutron Vision',
+   0x001867: u'Evolution Robotics Retail',
+   0x001868: u'Scientific Atlanta, A Cisco Company',
+   0x001869: u'KINGJIM',
+   0x00186A: u'Global Link Digital Technology Co,.LTD',
+   0x00186B: u'Sambu Communics CO., LTD.',
+   0x00186C: u'Neonode AB',
+   0x00186D: u'Zhenjiang Sapphire Electronic Industry CO.',
+   0x00186E: u'3COM Europe Ltd',
+   0x00186F: u'Setha Industria Eletronica LTDA',
+   0x001870: u'E28 Shanghai Limited',
+   0x001871: u'Global Data Services',
+   0x001872: u'Expertise Engineering',
+   0x001873: u'Cisco Systems',
+   0x001874: u'Cisco Systems',
+   0x001875: u'AnaCise Testnology Pte Ltd',
+   0x001876: u'WowWee Ltd.',
+   0x001877: u'Amplex A/S',
+   0x001878: u'Mackware GmbH',
+   0x001879: u'dSys',
+   0x00187A: u'Wiremold',
+   0x00187B: u'4NSYS Co. Ltd.',
+   0x00187C: u'INTERCROSS, LLC',
+   0x00187D: u'Armorlink shanghai Co. Ltd',
+   0x00187E: u'RGB Spectrum',
+   0x00187F: u'ZODIANET',
+   0x001880: u'Mobilygen',
+   0x001881: u'Buyang Electronics Industrial Co., Ltd',
+   0x001882: u'Huawei Technologies Co., Ltd.',
+   0x001883: u'FORMOSA21 INC.',
+   0x001884: u'FON',
+   0x001885: u'Avigilon Corporation',
+   0x001886: u'EL-TECH, INC.',
+   0x001887: u'Metasystem SpA',
+   0x001888: u'GOTIVE a.s.',
+   0x001889: u'WinNet Solutions Limited',
+   0x00188A: u'Infinova LLC',
+   0x00188B: u'Dell',
+   0x00188C: u'Mobile Action Technology Inc.',
+   0x00188D: u'Nokia Danmark A/S',
+   0x00188E: u'Ekahau, Inc.',
+   0x00188F: u'Montgomery Technology, Inc.',
+   0x001890: u'RadioCOM, s.r.o.',
+   0x001891: u'Zhongshan General K-mate Electronics Co., Ltd',
+   0x001892: u'ads-tec GmbH',
+   0x001893: u'SHENZHEN PHOTON BROADBAND TECHNOLOGY CO.,LTD',
+   0x001894: u'zimocom',
+   0x001895: u'Hansun Technologies Inc.',
+   0x001896: u'Great Well Electronic LTD',
+   0x001897: u'JESS-LINK PRODUCTS Co., LTD',
+   0x001898: u'KINGSTATE ELECTRONICS CORPORATION',
+   0x001899: u'ShenZhen jieshun Science&Technology Industry CO,LTD.',
+   0x00189A: u'HANA Micron Inc.',
+   0x00189B: u'Thomson Inc.',
+   0x00189C: u'Weldex Corporation',
+   0x00189D: u'Navcast Inc.',
+   0x00189E: u'OMNIKEY GmbH.',
+   0x00189F: u'Lenntek Corporation',
+   0x0018A0: u'Cierma Ascenseurs',
+   0x0018A1: u'Tiqit Computers, Inc.',
+   0x0018A2: u'XIP Technology AB',
+   0x0018A3: u'ZIPPY TECHNOLOGY CORP.',
+   0x0018A4: u'Motorola Mobile Devices',
+   0x0018A5: u'ADigit Technologies Corp.',
+   0x0018A6: u'Persistent Systems, LLC',
+   0x0018A7: u'Yoggie Security Systems LTD.',
+   0x0018A8: u'AnNeal Technology Inc.',
+   0x0018A9: u'Ethernet Direct Corporation',
+   0x0018AA: u'PRIVATE',
+   0x0018AB: u'BEIJING LHWT MICROELECTRONICS INC.',
+   0x0018AC: u'Shanghai Jiao Da HISYS Technology Co. Ltd.',
+   0x0018AD: u'NIDEC SANKYO CORPORATION',
+   0x0018AE: u'Tongwei Video Technology CO.,LTD',
+   0x0018AF: u'Samsung Electronics Co., Ltd.',
+   0x0018B0: u'Nortel',
+   0x0018B1: u'Blade Network Technologies',
+   0x0018B2: u'ADEUNIS RF',
+   0x0018B3: u'TEC WizHome Co., Ltd.',
+   0x0018B4: u'Dawon Media Inc.',
+   0x0018B5: u'Magna Carta',
+   0x0018B6: u'S3C, Inc.',
+   0x0018B7: u'D3 LED, LLC',
+   0x0018B8: u'New Voice International AG',
+   0x0018B9: u'Cisco Systems',
+   0x0018BA: u'Cisco Systems',
+   0x0018BB: u'Eliwell Controls srl',
+   0x0018BC: u'ZAO NVP Bolid',
+   0x0018BD: u'SHENZHEN DVBWORLD TECHNOLOGY CO., LTD.',
+   0x0018BE: u'ANSA Corporation',
+   0x0018BF: u'Essence Technology Solution, Inc.',
+   0x0018C0: u'Motorola CHS',
+   0x0018C1: u'Almitec Informática e Comércio Ltda.',
+   0x0018C2: u'Firetide, Inc',
+   0x0018C3: u'C&S Microwave',
+   0x0018C4: u'Raba Technologies LLC',
+   0x0018C5: u'Nokia Danmark A/S',
+   0x0018C6: u'OPW Fuel Management Systems',
+   0x0018C7: u'Real Time Automation',
+   0x0018C8: u'ISONAS Inc.',
+   0x0018C9: u'EOps Technology Limited',
+   0x0018CA: u'Viprinet GmbH',
+   0x0018CB: u'Tecobest Technology Limited',
+   0x0018CC: u'AXIOHM SAS',
+   0x0018CD: u'Erae Electronics Industry Co., Ltd',
+   0x0018CE: u'Dreamtech Co., Ltd',
+   0x0018CF: u'Baldor Electric Company',
+   0x0018D0: u'@ROAD Inc',
+   0x0018D1: u'Siemens Home & Office Comm. Devices',
+   0x0018D2: u'High-Gain Antennas LLC',
+   0x0018D3: u'TEAMCAST',
+   0x0018D4: u'Unified Display Interface SIG',
+   0x0018D5: u'REIGNCOM',
+   0x0018D6: u'Swirlnet A/S',
+   0x0018D7: u'Javad Navigation Systems Inc.',
+   0x0018D8: u'ARCH METER Corporation',
+   0x0018D9: u'Santosha Internatonal, Inc',
+   0x0018DA: u'AMBER wireless GmbH',
+   0x0018DB: u'EPL Technology Ltd',
+   0x0018DC: u'Prostar Co., Ltd.',
+   0x0018DD: u'Silicondust Engineering Ltd',
+   0x0018DE: u'Intel Corporation',
+   0x0018DF: u'The Morey Corporation',
+   0x0018E0: u'ANAVEO',
+   0x0018E1: u'Verkerk Service Systemen',
+   0x0018E2: u'Topdata Sistemas de Automacao Ltda',
+   0x0018E3: u'Visualgate Systems, Inc.',
+   0x0018E4: u'YIGUANG',
+   0x0018E5: u'Adhoco AG',
+   0x0018E6: u'Computer Hardware Design SIA',
+   0x0018E7: u'Cameo Communications, INC.',
+   0x0018E8: u'Hacetron Corporation',
+   0x0018E9: u'Numata Corporation',
+   0x0018EA: u'Alltec GmbH',
+   0x0018EB: u'BroVis Wireless Networks',
+   0x0018EC: u'Welding Technology Corporation',
+   0x0018ED: u'ACCUTECH INTERNATIONAL CO., LTD.',
+   0x0018EE: u'Videology Imaging Solutions, Inc.',
+   0x0018EF: u'Escape Communications, Inc.',
+   0x0018F0: u'JOYTOTO Co., Ltd.',
+   0x0018F1: u'Chunichi Denshi Co.,LTD.',
+   0x0018F2: u'Beijing Tianyu Communication Equipment Co., Ltd',
+   0x0018F3: u'ASUSTek COMPUTER INC.',
+   0x0018F4: u'EO TECHNICS Co., Ltd.',
+   0x0018F5: u'Shenzhen Streaming Video Technology Company Limited',
+   0x0018F6: u'Thomson Telecom Belgium',
+   0x0018F7: u'Kameleon Technologies',
+   0x0018F8: u'Cisco-Linksys LLC',
+   0x0018F9: u'VVOND, Inc.',
+   0x0018FA: u'Yushin Precision Equipment Co.,Ltd.',
+   0x0018FB: u'Compro Technology',
+   0x0018FC: u'Altec Electronic AG',
+   0x0018FD: u'Optimal Technologies International Inc.',
+   0x0018FE: u'Hewlett Packard',
+   0x0018FF: u'PowerQuattro Co.',
+   0x001900: u'Intelliverese - DBA Voicecom',
+   0x001901: u'F1MEDIA',
+   0x001902: u'Cambridge Consultants Ltd',
+   0x001903: u'Bigfoot Networks Inc',
+   0x001904: u'WB Electronics Sp. z o.o.',
+   0x001905: u'SCHRACK Seconet AG',
+   0x001906: u'Cisco Systems',
+   0x001907: u'Cisco Systems',
+   0x001908: u'Duaxes Corporation',
+   0x001909: u'Devi A/S',
+   0x00190A: u'HASWARE INC.',
+   0x00190B: u'Southern Vision Systems, Inc.',
+   0x00190C: u'Encore Electronics, Inc.',
+   0x00190D: u'IEEE 1394c',
+   0x00190E: u'Atech Technology Co., Ltd.',
+   0x00190F: u'Advansus Corp.',
+   0x001910: u'Knick Elektronische Messgeraete GmbH & Co. KG',
+   0x001911: u'Just In Mobile Information Technologies (Shanghai) Co., Ltd.',
+   0x001912: u'Welcat Inc',
+   0x001913: u'Chuang-Yi Network Equipment Co.Ltd.',
+   0x001914: u'Winix Co., Ltd',
+   0x001915: u'TECOM Co., Ltd.',
+   0x001916: u'PayTec AG',
+   0x001917: u'Posiflex Inc.',
+   0x001918: u'Interactive Wear AG',
+   0x001919: u'ASTEL Inc.',
+   0x00191A: u'IRLINK',
+   0x00191B: u'Sputnik Engineering AG',
+   0x00191C: u'Sensicast Systems',
+   0x00191D: u'Nintendo Co.,Ltd.',
+   0x00191E: u'Beyondwiz Co., Ltd.',
+   0x00191F: u'Microlink communications Inc.',
+   0x001920: u'KUME electric Co.,Ltd.',
+   0x001921: u'Elitegroup Computer System Co.',
+   0x001922: u'CM Comandos Lineares',
+   0x001923: u'Phonex Korea Co., LTD.',
+   0x001924: u'LBNL  Engineering',
+   0x001925: u'Intelicis Corporation',
+   0x001926: u'BitsGen Co., Ltd.',
+   0x001927: u'ImCoSys Ltd',
+   0x001928: u'Siemens AG, Transportation Systems',
+   0x001929: u'2M2B Montadora de Maquinas Bahia Brasil LTDA',
+   0x00192A: u'Antiope Associates',
+   0x00192B: u'Hexagram, Inc.',
+   0x00192C: u'Motorola Mobile Devices',
+   0x00192D: u'Nokia Corporation',
+   0x00192E: u'Spectral Instruments, Inc.',
+   0x00192F: u'Cisco Systems',
+   0x001930: u'Cisco Systems',
+   0x001931: u'Balluff GmbH',
+   0x001932: u'Gude Analog- und Digialsysteme GmbH',
+   0x001933: u'Strix Systems, Inc.',
+   0x001934: u'TRENDON TOUCH TECHNOLOGY CORP.',
+   0x001935: u'Duerr Dental GmbH & Co. KG',
+   0x001936: u'STERLITE OPTICAL TECHNOLOGIES LIMITED',
+   0x001937: u'CommerceGuard AB',
+   0x001938: u'UMB Communications Co., Ltd.',
+   0x001939: u'Gigamips',
+   0x00193A: u'OESOLUTIONS',
+   0x00193B: u'Deliberant LLC',
+   0x00193C: u'HighPoint Technologies Incorporated',
+   0x00193D: u'GMC Guardian Mobility Corp.',
+   0x00193E: u'PIRELLI BROADBAND SOLUTIONS',
+   0x00193F: u'RDI technology(Shenzhen) Co.,LTD',
+   0x001940: u'Rackable Systems',
+   0x001941: u'Pitney Bowes, Inc',
+   0x001942: u'ON SOFTWARE INTERNATIONAL LIMITED',
+   0x001943: u'Belden',
+   0x001944: u'Fossil Partners, L.P.',
+   0x001945: u'Ten-Tec Inc.',
+   0x001946: u'Cianet Industria e Comercio S/A',
+   0x001947: u'Scientific Atlanta, A Cisco Company',
+   0x001948: u'AireSpider Networks',
+   0x001949: u'TENTEL  COMTECH CO., LTD.',
+   0x00194A: u'TESTO AG',
+   0x00194B: u'SAGEM COMMUNICATION',
+   0x00194C: u'Fujian Stelcom information & Technology CO.,Ltd',
+   0x00194D: u'Avago Technologies Sdn Bhd',
+   0x00194E: u'Ultra Electronics - TCS (Tactical Communication Systems)',
+   0x00194F: u'Nokia Danmark A/S',
+   0x001950: u'Harman Multimedia',
+   0x001951: u'NETCONS, s.r.o.',
+   0x001952: u'ACOGITO Co., Ltd',
+   0x001953: u'Chainleader Communications Corp.',
+   0x001954: u'Leaf Corporation.',
+   0x001955: u'Cisco Systems',
+   0x001956: u'Cisco Systems',
+   0x001957: u'Saafnet Canada Inc.',
+   0x001958: u'Bluetooth SIG, Inc.',
+   0x001959: u'Staccato Communications Inc.',
+   0x00195A: u'Jenaer Antriebstechnik GmbH',
+   0x00195B: u'D-Link Corporation',
+   0x00195C: u'Innotech Corporation',
+   0x00195D: u'ShenZhen XinHuaTong Opto Electronics Co.,Ltd',
+   0x00195E: u'Motorola CHS',
+   0x00195F: u'Valemount Networks Corporation',
+   0x001960: u'DoCoMo Systems, Inc.',
+   0x001961: u'Blaupunkt GmbH',
+   0x001962: u'Commerciant, LP',
+   0x001963: u'Sony Ericsson Mobile Communications AB',
+   0x001964: u'Doorking Inc.',
+   0x001965: u'YuHua TelTech (ShangHai) Co., Ltd.',
+   0x001966: u'Asiarock Technology Limited',
+   0x001967: u'TELDAT Sp.J.',
+   0x001968: u'Digital Video Networks(Shanghai) CO. LTD.',
+   0x001969: u'Nortel',
+   0x00196A: u'MikroM GmbH',
+   0x00196B: u'Danpex Corporation',
+   0x00196C: u'ETROVISION TECHNOLOGY',
+   0x00196D: u'Raybit Systems Korea, Inc',
+   0x00196E: u'Metacom (Pty) Ltd.',
+   0x00196F: u'SensoPart GmbH',
+   0x001970: u'Z-Com, Inc.',
+   0x001971: u'Guangzhou Unicomp Technology Co.,Ltd',
+   0x001972: u'Plexus (Xiamen) Co.,ltd',
+   0x001973: u'Zeugma Systems',
+   0x001974: u'AboCom Systems, Inc.',
+   0x001975: u'Beijing Huisen networks technology Inc',
+   0x001976: u'Xipher Technologies, LLC',
+   0x001977: u'Aerohive Networks, Inc.',
+   0x001978: u'Datum Systems, Inc.',
+   0x001979: u'Nokia Danmark A/S',
+   0x00197A: u'MAZeT GmbH',
+   0x00197B: u'Picotest Corp.',
+   0x00197C: u'Riedel Communications GmbH',
+   0x00197D: u'Hon Hai Precision Ind. Co., Ltd',
+   0x00197E: u'Hon Hai Precision Ind. Co., Ltd',
+   0x00197F: u'PLANTRONICS, INC.',
+   0x001980: u'Gridpoint Systems',
+   0x001981: u'Vivox Inc',
+   0x001982: u'SmarDTV',
+   0x001983: u'CCT R&D Limited',
+   0x001984: u'ESTIC Corporation',
+   0x001985: u'IT Watchdogs, Inc',
+   0x001986: u'Cheng Hongjian',
+   0x001987: u'Panasonic Mobile Communications Co., Ltd.',
+   0x001988: u'Wi2Wi, Inc',
+   0x001989: u'Sonitrol Corporation',
+   0x00198A: u'Northrop Grumman Systems Corp.',
+   0x00198B: u'Novera Optics Korea, Inc.',
+   0x00198C: u'iXSea',
+   0x00198D: u'Ocean Optics, Inc.',
+   0x00198E: u'Oticon A/S',
+   0x00198F: u'Alcatel Bell N.V.',
+   0x001990: u'ELM DATA Co., Ltd.',
+   0x001991: u'avinfo',
+   0x001992: u'Bluesocket, Inc',
+   0x001993: u'Changshu Switchgear MFG. Co.,Ltd. (Former Changshu Switchgea',
+   0x001994: u'Jorjin technologies inc.',
+   0x001995: u'Jurong Hi-Tech (Suzhou)Co.ltd',
+   0x001996: u'TurboChef Technologies Inc.',
+   0x001997: u'Soft Device Sdn Bhd',
+   0x001998: u'SATO CORPORATION',
+   0x001999: u'Fujitsu Siemens Computers',
+   0x00199A: u'EDO-EVI',
+   0x00199B: u'Diversified Technical Systems, Inc.',
+   0x00199C: u'CTRING',
+   0x00199D: u'V, Inc.',
+   0x00199E: u'SHOWADENSHI ELECTRONICS,INC.',
+   0x00199F: u'DKT A/S',
+   0x0019A0: u'NIHON DATA SYSTENS, INC.',
+   0x0019A1: u'LG INFORMATION & COMM.',
+   0x0019A2: u'ORION TELE-EQUIPMENTS PVT LTD',
+   0x0019A3: u'asteel electronique atlantique',
+   0x0019A4: u'Austar Technology (hang zhou) Co.,Ltd',
+   0x0019A5: u'RadarFind Corporation',
+   0x0019A6: u'Motorola CHS',
+   0x0019A7: u'ITU-T',
+   0x0019A8: u'WiQuest Communications, Inc',
+   0x0019A9: u'Cisco Systems',
+   0x0019AA: u'Cisco Systems',
+   0x0019AB: u'Raycom CO ., LTD',
+   0x0019AC: u'GSP SYSTEMS Inc.',
+   0x0019AD: u'BOBST SA',
+   0x0019AE: u'Hopling Technologies b.v.',
+   0x0019AF: u'Rigol Technologies, Inc.',
+   0x0019B0: u'HanYang System',
+   0x0019B1: u'Arrow7 Corporation',
+   0x0019B2: u'XYnetsoft Co.,Ltd',
+   0x0019B3: u'Stanford Research Systems',
+   0x0019B4: u'VideoCast Ltd.',
+   0x0019B5: u'Famar Fueguina S.A.',
+   0x0019B6: u'Euro Emme s.r.l.',
+   0x0019B7: u'Nokia Danmark A/S',
+   0x0019B8: u'Boundary Devices',
+   0x0019B9: u'Dell Inc.',
+   0x0019BA: u'Paradox Security Systems Ltd',
+   0x0019BB: u'Hewlett Packard',
+   0x0019BC: u'ELECTRO CHANCE SRL',
+   0x0019BD: u'New Media Life',
+   0x0019BE: u'Altai Technologies Limited',
+   0x0019BF: u'Citiway technology Co.,ltd',
+   0x0019C0: u'Motorola Mobile Devices',
+   0x0019C1: u'Alps Electric Co., Ltd',
+   0x0019C2: u'Equustek Solutions, Inc.',
+   0x0019C3: u'Qualitrol',
+   0x0019C4: u'Infocrypt Inc.',
+   0x0019C5: u'SONY Computer Entertainment inc,',
+   0x0019C6: u'ZTE Corporation',
+   0x0019C7: u'Cambridge Industries(Group) Co.,Ltd.',
+   0x0019C8: u'AnyDATA Corporation',
+   0x0019C9: u'S&C ELECTRIC COMPANY',
+   0x0019CA: u'Broadata Communications, Inc',
+   0x0019CB: u'ZyXEL Communications Corporation',
+   0x0019CC: u'RCG (HK) Ltd',
+   0x0019CD: u'Chengdu ethercom information technology Ltd.',
+   0x0019CE: u'Progressive Gaming International',
+   0x0019CF: u'SALICRU, S.A.',
+   0x0019D0: u'Cathexis',
+   0x0019D1: u'Intel Corporation',
+   0x0019D2: u'Intel Corporation',
+   0x0019D3: u'TRAK Microwave',
+   0x0019D4: u'ICX Technologies',
+   0x0019D5: u'IP Innovations, Inc.',
+   0x0019D6: u'LS Cable Ltd.',
+   0x0019D7: u'FORTUNETEK CO., LTD',
+   0x0019D8: u'MAXFOR',
+   0x0019D9: u'Zeutschel GmbH',
+   0x0019DA: u'Welltrans O&E Technology Co. , Ltd.',
+   0x0019DB: u'MICRO-STAR INTERNATIONAL CO., LTD.',
+   0x0019DC: u'ENENSYS Technologies',
+   0x0019DD: u'FEI-Zyfer, Inc.',
+   0x0019DE: u'MOBITEK',
+   0x0019DF: u'THOMSON APDG',
+   0x0019E0: u'TP-LINK Technologies Co., Ltd.',
+   0x0019E1: u'Nortel',
+   0x0019E2: u'Juniper Networks',
+   0x0019E3: u'Apple Computers',
+   0x0019E4: u'2Wire, Inc',
+   0x0019E5: u'Lynx Studio Technology, Inc.',
+   0x0019E6: u'TOYO MEDIC CO.,LTD.',
+   0x0019E7: u'Cisco Systems',
+   0x0019E8: u'Cisco Systems',
+   0x0019E9: u'S-Information Technolgy, Co., Ltd.',
+   0x0019EA: u'TeraMage Technologies Co., Ltd.',
+   0x0019EB: u'Pyronix Ltd',
+   0x0019EC: u'Sagamore Systems, Inc.',
+   0x0019ED: u'Axesstel Inc.',
+   0x0019EE: u'CARLO GAVAZZI CONTROLS SPA-Controls Division',
+   0x0019EF: u'SHENZHEN LINNKING ELECTRONICS CO.,LTD',
+   0x0019F0: u'UNIONMAN TECHNOLOGY CO.,LTD',
+   0x0019F1: u'Star Communication Network Technology Co.,Ltd',
+   0x0019F2: u'Teradyne K.K.',
+   0x0019F3: u'Telematrix, Inc',
+   0x0019F4: u'Convergens Oy Ltd',
+   0x0019F5: u'Imagination Technologies Ltd',
+   0x0019F6: u'Acconet (PTE) Ltd',
+   0x0019F7: u'Onset Computer Corporation',
+   0x0019F8: u'Embedded Systems Design, Inc.',
+   0x0019F9: u'Lambda',
+   0x0019FA: u'Cable Vision Electronics CO., LTD.',
+   0x0019FB: u'AMSTRAD PLC',
+   0x0019FC: u'PT. Ufoakses Sukses Luarbiasa',
+   0x0019FD: u'Nintendo Co., Ltd.',
+   0x0019FE: u'SHENZHEN SEECOMM TECHNOLOGY CO.,LTD.',
+   0x0019FF: u'Finnzymes',
+   0x001A00: u'MATRIX INC.',
+   0x001A01: u'Smiths Medical',
+   0x001A02: u'SECURE CARE PRODUCTS, INC',
+   0x001A03: u'Angel Electronics Co., Ltd.',
+   0x001A04: u'Interay Solutions BV',
+   0x001A05: u'OPTIBASE LTD',
+   0x001A06: u'OpVista, Inc.',
+   0x001A07: u'Arecont Vision',
+   0x001A08: u'Dalman Technical Services',
+   0x001A09: u'Wayfarer Transit Systems Ltd',
+   0x001A0A: u'Adaptive Micro-Ware Inc.',
+   0x001A0B: u'BONA TECHNOLOGY INC.',
+   0x001A0C: u'Swe-Dish Satellite Systems AB',
+   0x001A0D: u'HandHeld entertainment, Inc.',
+   0x001A0E: u'Cheng Uei Precision Industry Co.,Ltd',
+   0x001A0F: u'Sistemas Avanzados de Control, S.A.',
+   0x001A10: u'LUCENT TRANS ELECTRONICS CO.,LTD',
+   0x001A11: u'Google Inc.',
+   0x001A12: u'PRIVATE',
+   0x001A13: u'Wanlida Group Co., LTD',
+   0x001A14: u'Xin Hua Control Engineering Co.,Ltd.',
+   0x001A15: u'gemalto e-Payment',
+   0x001A16: u'Nokia Danmark A/S',
+   0x001A17: u'Teak Technologies, Inc.',
+   0x001A18: u'Advanced Simulation Technology inc.',
+   0x001A19: u'Computer Engineering Limited',
+   0x001A1A: u'Gentex Corporation/Electro-Acoustic Products',
+   0x001A1B: u'Motorola Mobile Devices',
+   0x001A1C: u'GT&T Engineering Pte Ltd',
+   0x001A1D: u'PChome Online Inc.',
+   0x001A1E: u'Aruba Networks',
+   0x001A1F: u'Coastal Environmental Systems',
+   0x001A20: u'CMOTECH Co. Ltd.',
+   0x001A21: u'Indac B.V.',
+   0x001A22: u'eq-3 GmbH',
+   0x001A23: u'Ice Qube, Inc',
+   0x001A24: u'Galaxy Telecom Technologies Ltd',
+   0x001A25: u'DELTA DORE',
+   0x001A26: u'Deltanode Solutions AB',
+   0x001A27: u'Ubistar',
+   0x001A28: u'ASWT Co., LTD. Taiwan Branch H.K.',
+   0x001A29: u'Techsonic Industries d/b/a Humminbird',
+   0x001A2A: u'Arcadyan Technology Corporation',
+   0x001A2B: u'Ayecom Technology Co., Ltd.',
+   0x001A2C: u'SATEC Co.,LTD',
+   0x001A2D: u'The Navvo Group',
+   0x001A2E: u'Ziova Coporation',
+   0x001A2F: u'Cisco Systems',
+   0x001A30: u'Cisco Systems',
+   0x001A31: u'SCAN COIN Industries AB',
+   0x001A32: u'ACTIVA MULTIMEDIA',
+   0x001A33: u'ASI Communications, Inc.',
+   0x001A34: u'Konka Group Co., Ltd.',
+   0x001A35: u'BARTEC GmbH',
+   0x001A36: u'Actimon GmbH & Co. KG',
+   0x001A37: u'Lear Corporation',
+   0x001A38: u'SCI Technology',
+   0x001A39: u'Merten GmbH&CoKG',
+   0x001A3A: u'Dongahelecomm',
+   0x001A3B: u'Doah Elecom Inc.',
+   0x001A3C: u'Technowave Ltd.',
+   0x001A3D: u'Ajin Vision Co.,Ltd',
+   0x001A3E: u'Faster Technology LLC',
+   0x001A3F: u'intelbras',
+   0x001A40: u'A-FOUR TECH CO., LTD.',
+   0x001A41: u'INOCOVA Co.,Ltd',
+   0x001A42: u'Techcity Technology co., Ltd.',
+   0x001A43: u'Logical Link Communications',
+   0x001A44: u'JWTrading Co., Ltd',
+   0x001A45: u'GN Netcom as',
+   0x001A46: u'Digital Multimedia Technology Co., Ltd',
+   0x001A47: u'Agami Systems, Inc.',
+   0x001A48: u'Takacom Corporation',
+   0x001A49: u'Micro Vision Co.,LTD',
+   0x001A4A: u'Qumranet Inc.',
+   0x001A4B: u'Hewlett Packard',
+   0x001A4C: u'Crossbow Technology, Inc',
+   0x001A4D: u'GIGABYTE TECHNOLOGY CO.,LTD.',
+   0x001A4E: u'NTI AG / LinMot',
+   0x001A4F: u'AVM GmbH',
+   0x001A50: u'PheeNet Technology Corp.',
+   0x001A51: u'Alfred Mann Foundation',
+   0x001A52: u'Meshlinx Wireless Inc.',
+   0x001A53: u'Zylaya',
+   0x001A54: u'Hip Shing Electronics Ltd.',
+   0x001A55: u'ACA-Digital Corporation',
+   0x001A56: u'ViewTel Co,. Ltd.',
+   0x001A57: u'Matrix Design Group, LLC',
+   0x001A58: u'Celectronic GmbH',
+   0x001A59: u'Ircona',
+   0x001A5A: u'Korea Electric Power Data Network  (KDN) Co., Ltd',
+   0x001A5B: u'NetCare Service Co., Ltd.',
+   0x001A5C: u'Euchner GmbH+Co. KG',
+   0x001A5D: u'Mobinnova Corp.',
+   0x001A5E: u'Thincom Technology Co.,Ltd',
+   0x001A5F: u'KitWorks.fi Ltd.',
+   0x001A60: u'Wave Electronics Co.,Ltd.',
+   0x001A61: u'PacStar Corp.',
+   0x001A62: u'trusted data',
+   0x001A63: u'Elster Electricity, LLC',
+   0x001A64: u'IBM Corp.',
+   0x001A65: u'Seluxit',
+   0x001A66: u'Motorola CHS',
+   0x001A67: u'Infinite QL Sdn Bhd',
+   0x001A68: u'Weltec Enterprise Co., Ltd.',
+   0x001A69: u'Wuhan Yangtze Optical Technology CO.,Ltd.',
+   0x001A6A: u'Tranzas, Inc.',
+   0x001A6B: u'USI',
+   0x001A6C: u'Cisco Systems',
+   0x001A6D: u'Cisco Systems',
+   0x001A6E: u'Impro Technologies',
+   0x001A6F: u'MI.TEL s.r.l.',
+   0x001A70: u'Cisco-Linksys, LLC',
+   0x001A71: u'Diostech Co., Ltd.',
+   0x001A72: u'Mosart Semiconductor Corp.',
+   0x001A73: u'Gemtek Technology Co., Ltd.',
+   0x001A74: u'Procare International Co',
+   0x001A75: u'Sony Ericsson Mobile Communications',
+   0x001A76: u'SDT information Technology Co.,LTD.',
+   0x001A77: u'Motorola Mobile Devices',
+   0x001A78: u'ubtos',
+   0x001A79: u'TELECOMUNICATION TECHNOLOGIES LTD.',
+   0x001A7A: u'Lismore Instruments Limited',
+   0x001A7B: u'Teleco, Inc.',
+   0x001A7C: u'Hirschmann Automation and Control B.V.',
+   0x001A7D: u'cyber-blue(HK)Ltd',
+   0x001A7E: u'LN Srithai Comm Ltd.',
+   0x001A7F: u'GCI Science&Technology Co.,Ltd.',
+   0x001A80: u'Sony Corporation',
+   0x001A81: u'Zelax',
+   0x001A82: u'PROBA Building Automation Co.,LTD',
+   0x001A83: u'Pegasus Technologies Inc.',
+   0x001A84: u'V One Multimedia Pte Ltd',
+   0x001A85: u'NV Michel Van de Wiele',
+   0x001A86: u'AdvancedIO Systems Inc',
+   0x001A87: u'Canhold International Limited',
+   0x001A88: u'Venergy,Co,Ltd',
+   0x001A89: u'Nokia Danmark A/S',
+   0x001A8A: u'Samsung Electronics Co., Ltd.',
+   0x001A8B: u'CHUNIL ELECTRIC IND., CO.',
+   0x001A8C: u'Astaro AG',
+   0x001A8D: u'AVECS Bergen GmbH',
+   0x001A8E: u'3Way Networks Ltd',
+   0x001A8F: u'Nortel',
+   0x001A90: u'Trópico Sistemas e Telecomunicações da Amazônia LTDA.',
+   0x001A91: u'FusionDynamic Ltd.',
+   0x001A92: u'ASUSTek COMPUTER INC.',
+   0x001A93: u'ERCO Leuchten GmbH',
+   0x001A94: u'Votronic GmbH',
+   0x001A95: u'Hisense Mobile Communications Technoligy Co.,Ltd.',
+   0x001A96: u'ECLER S.A.',
+   0x001A97: u'fitivision technology Inc.',
+   0x001A98: u'Asotel Communication Limited Taiwan Branch',
+   0x001A99: u'Smarty (HZ) Information Electronics Co., Ltd',
+   0x001A9A: u'Skyworth Digital technology(shenzhen)co.ltd.',
+   0x001A9B: u'ADEC & Parter AG',
+   0x001A9C: u'RightHand Technologies, Inc.',
+   0x001A9D: u'Skipper Wireless, Inc.',
+   0x001A9E: u'ICON Digital International Limited',
+   0x001A9F: u'A-Link Europe Ltd',
+   0x001AA0: u'Dell Inc',
+   0x001AA1: u'Cisco Systems',
+   0x001AA2: u'Cisco Systems',
+   0x001AA3: u'DELORME',
+   0x001AA4: u'Future University-Hakodate',
+   0x001AA5: u'BRN Phoenix',
+   0x001AA6: u'Telefunken Radio Communication Systems GmbH &CO.KG',
+   0x001AA7: u'Torian Wireless',
+   0x001AA8: u'Mamiya Digital Imaging Co., Ltd.',
+   0x001AA9: u'FUJIAN STAR-NET COMMUNICATION CO.,LTD',
+   0x001AAA: u'Analogic Corp.',
+   0x001AAB: u'eWings s.r.l.',
+   0x001AAC: u'Corelatus AB',
+   0x001AAD: u'Motorola CHS',
+   0x001AAE: u'Savant Systems LLC',
+   0x001AAF: u'BLUSENS TECHNOLOGY',
+   0x001AB0: u'Signal Networks Pvt. Ltd.,',
+   0x001AB1: u'Asia Pacific Satellite Industries Co., Ltd.',
+   0x001AB2: u'Cyber Solutions Inc.',
+   0x001AB3: u'VISIONITE INC.',
+   0x001AB4: u'FFEI Ltd.',
+   0x001AB5: u'Home Network System',
+   0x001AB6: u'Luminary Micro Inc',
+   0x001AB7: u'Ethos Networks LTD.',
+   0x001AB8: u'Anseri Corporation',
+   0x001AB9: u'PMC',
+   0x001ABA: u'Caton Overseas Limited',
+   0x001ABB: u'Fontal Technology Incorporation',
+   0x001ABC: u'U4EA Technologies Ltd',
+   0x001ABD: u'Impatica Inc.',
+   0x001ABE: u'COMPUTER HI-TECH INC.',
+   0x001ABF: u'TRUMPF Laser Marking Systems AG',
+   0x001AC0: u'JOYBIEN TECHNOLOGIES CO., LTD.',
+   0x001AC1: u'3COM EUROPE',
+   0x001AC2: u'YEC Co.,Ltd.',
+   0x001AC3: u'Scientific-Atlanta, Inc',
+   0x001AC4: u'2Wire, Inc',
+   0x001AC5: u'BreakingPoint Systems, Inc.',
+   0x001AC6: u'Micro Control Designs',
+   0x001AC7: u'UNIPOINT',
+   0x001AC8: u'ISL (Instrumentation Scientifique de Laboratoire)',
+   0x001AC9: u'SUZUKEN CO.,LTD',
+   0x001ACA: u'Tilera Corporation',
+   0x001ACB: u'Autocom Products Ltd',
+   0x001ACC: u'Celestial Semiconductor, Ltd',
+   0x001ACD: u'Tidel Engineering LP',
+   0x001ACE: u'YUPITERU INDUSTRIES CO., LTD.',
+   0x001ACF: u'C.T. ELETTRONICA',
+   0x001AD0: u'Siemens Schweiz AG',
+   0x001AD1: u'FARGO CO., LTD.',
+   0x001AD2: u'Eletronica Nitron Ltda',
+   0x001AD3: u'Vamp Ltd.',
+   0x001AD4: u'iPOX Technology Co., Ltd.',
+   0x001AD5: u'KMC CHAIN INDUSTRIAL CO., LTD.',
+   0x001AD6: u'JIAGNSU AETNA ELECTRIC CO.,LTD',
+   0x001AD7: u'Christie Digital Systems, Inc.',
+   0x001AD8: u'AlsterAero GmbH',
+   0x001AD9: u'International Broadband Electric Communications, Inc.',
+   0x001ADA: u'Biz-2-Me Inc.',
+   0x001ADB: u'Motorola Mobile Devices',
+   0x001ADC: u'Nokia Danmark A/S',
+   0x001ADD: u'PePWave Ltd',
+   0x001ADE: u'Motorola CHS',
+   0x001ADF: u'Interactivetv Pty Limited',
+   0x001AE0: u'Mythology Tech Express Inc.',
+   0x001AE1: u'EDGE ACCESS INC',
+   0x001AE2: u'Cisco Systems',
+   0x001AE3: u'Cisco Systems',
+   0x001AE4: u'Liposonix Inc,',
+   0x001AE5: u'Mvox Technologies Inc.',
+   0x001AE6: u'Atlanta Advanced Communications Holdings Limited',
+   0x001AE7: u'Aztek Networks, Inc.',
+   0x001AE8: u'Siemens Enterprise Communications GmbH & Co. KG',
+   0x001AE9: u'Nintendo Co., Ltd.',
+   0x001AEA: u'Radio Terminal Systems Pty Ltd',
+   0x001AEB: u'Allied Telesis K.K.',
+   0x001AEC: u'Keumbee Electronics Co.,Ltd.',
+   0x001AED: u'INCOTEC GmbH',
+   0x001AEE: u'Shenztech Ltd',
+   0x001AEF: u'Loopcomm Technology, Inc.',
+   0x001AF0: u'Alcatel - IPD',
+   0x001AF1: u'Embedded Artists AB',
+   0x001AF2: u'Dynavisions GmbH',
+   0x001AF3: u'Samyoung Electronics',
+   0x001AF4: u'Handreamnet',
+   0x001AF5: u'PENTAONE. CO., LTD.',
+   0x001AF6: u'Woven Systems, Inc.',
+   0x001AF7: u'dataschalt e+a GmbH',
+   0x001AF8: u'Copley Controls Corporation',
+   0x001AF9: u'AeroVIronment (AV Inc)',
+   0x001AFA: u'Welch Allyn, Inc.',
+   0x001AFB: u'Joby Inc.',
+   0x001AFC: u'ModusLink Corporation',
+   0x001AFD: u'EVOLIS',
+   0x001AFE: u'SOFACREAL',
+   0x001AFF: u'Wizyoung Tech.',
+   0x001B00: u'Neopost Technologies',
+   0x001B01: u'Applied Radio Technologies',
+   0x001B02: u'ED Co.Ltd',
+   0x001B03: u'Action Technology (SZ) Co., Ltd',
+   0x001B04: u'Affinity International S.p.a',
+   0x001B05: u'Young Media Concepts GmbH',
+   0x001B06: u'Ateliers R. LAUMONIER',
+   0x001B07: u'Mendocino Software',
+   0x001B08: u'Danfoss Drives A/S',
+   0x001B09: u'Matrix Telecom Pvt. Ltd.',
+   0x001B0A: u'Intelligent Distributed Controls Ltd',
+   0x001B0B: u'Phidgets Inc.',
+   0x001B0C: u'Cisco Systems',
+   0x001B0D: u'Cisco Systems',
+   0x001B0E: u'InoTec GmbH Organisationssysteme',
+   0x001B0F: u'Petratec',
+   0x001B10: u'ShenZhen Kang Hui Technology Co.,ltd',
+   0x001B11: u'D-Link Corporation',
+   0x001B12: u'Apprion',
+   0x001B13: u'Icron Technologies Corporation',
+   0x001B14: u'Carex Lighting Equipment Factory',
+   0x001B15: u'Voxtel, Inc.',
+   0x001B16: u'Celtro Ltd.',
+   0x001B17: u'Palo Alto Networks',
+   0x001B18: u'Tsuken Electric Ind. Co.,Ltd',
+   0x001B19: u'IEEE 1588 Standard',
+   0x001B1A: u'e-trees Japan, Inc.',
+   0x001B1B: u'Siemens AG, A&D AS EWK PU1',
+   0x001B1C: u'Coherent',
+   0x001B1D: u'Phoenix International Co., Ltd',
+   0x001B1E: u'HART Communication Foundation',
+   0x001B1F: u'DELTA - Danish Electronics, Light & Acoustics',
+   0x001B20: u'TPine Technology',
+   0x001B21: u'Intel Corporate',
+   0x001B22: u'Palit Microsystems ( H.K.) Ltd.',
+   0x001B23: u'SimpleComTools',
+   0x001B24: u'Quanta Computer Inc.',
+   0x001B25: u'Nortel',
+   0x001B26: u'RON-Telecom ZAO',
+   0x001B27: u'Merlin CSI',
+   0x001B28: u'POLYGON, JSC',
+   0x001B29: u'Avantis.Co.,Ltd',
+   0x001B2A: u'Cisco Systems',
+   0x001B2B: u'Cisco Systems',
+   0x001B2C: u'ATRON electronic GmbH',
+   0x001B2D: u'PRIVATE',
+   0x001B2E: u'Sinkyo Electron Inc',
+   0x001B2F: u'NETGEAR Inc.',
+   0x001B30: u'Solitech Inc.',
+   0x001B31: u'Neural Image. Co. Ltd.',
+   0x001B32: u'QLogic Corporation',
+   0x001B33: u'Nokia Danmark A/S',
+   0x001B34: u'Focus System Inc.',
+   0x001B35: u'ChongQing JINOU Science & Technology Development CO.,Ltd',
+   0x001B36: u'Tsubata Engineering Co.,Ltd. (Head Office)',
+   0x001B37: u'Computec Oy',
+   0x001B38: u'COMPAL ELECTRONICS TECHNOLOGIC CO., LTD.',
+   0x001B39: u'Proxicast',
+   0x001B3A: u'SIMS Corp.',
+   0x001B3B: u'Yi-Qing CO., LTD',
+   0x001B3C: u'Software Technologies Group,Inc.',
+   0x001B3D: u'EuroTel Spa',
+   0x001B3E: u'Curtis, Inc.',
+   0x001B3F: u'ProCurve Networking by HP',
+   0x001B40: u'Network Automation mxc AB',
+   0x001B41: u'General Infinity Co.,Ltd.',
+   0x001B42: u'Wise & Blue',
+   0x001B43: u'Beijing DG Telecommunications equipment Co.,Ltd',
+   0x001B44: u'SanDisk Corporation',
+   0x001B45: u'ABB AS, Division Automation Products',
+   0x001B46: u'Blueone Technology Co.,Ltd',
+   0x001B47: u'Futarque A/S',
+   0x001B48: u'Shenzhen Lantech Electronics Co., Ltd.',
+   0x001B49: u'Roberts Radio limited',
+   0x001B4A: u'W&W Communications, Inc.',
+   0x001B4B: u'SANION Co., Ltd.',
+   0x001B4C: u'Signtech',
+   0x001B4D: u'Areca Technology Corporation',
+   0x001B4E: u'Navman New Zealand',
+   0x001B4F: u'Avaya Inc.',
+   0x001B50: u'Nizhny Novgorod Factory named after M.Frunze, FSUE (NZiF)',
+   0x001B51: u'Vector Technology Corp.',
+   0x001B52: u'Motorola Mobile Devices',
+   0x001B53: u'Cisco Systems',
+   0x001B54: u'Cisco Systems',
+   0x001B55: u'Hurco Automation Ltd.',
+   0x001B56: u'Tehuti Networks Ltd.',
+   0x001B57: u'SEMINDIA SYSTEMS PRIVATE LIMITED',
+   0x001B58: u'PRIVATE',
+   0x001B59: u'Sony Ericsson Mobile Communications AB',
+   0x001B5A: u'Apollo Imaging Technologies, Inc.',
+   0x001B5B: u'2Wire, Inc.',
+   0x001B5C: u'Azuretec Co., Ltd.',
+   0x001B5D: u'Vololink Pty Ltd',
+   0x001B5E: u'BPL Limited',
+   0x001B5F: u'Alien Technology',
+   0x001B60: u'NAVIGON AG',
+   0x001B61: u'Digital Acoustics, LLC',
+   0x001B62: u'JHT Optoelectronics Co.,Ltd.',
+   0x001B63: u'Apple Inc.',
+   0x001B64: u'IsaacLandKorea',
+   0x001B65: u'China Gridcom Co., Ltd',
+   0x001B66: u'Sennheiser electronic GmbH & Co. KG',
+   0x001B67: u'Ubiquisys Ltd',
+   0x001B68: u'Modnnet Co., Ltd',
+   0x001B69: u'Equaline Corporation',
+   0x001B6A: u'Powerwave UK Ltd',
+   0x001B6B: u'Swyx Solutions AG',
+   0x001B6C: u'LookX Digital Media BV',
+   0x001B6D: u'Midtronics, Inc.',
+   0x001B6E: u'Anue Systems, Inc.',
+   0x001B6F: u'Teletrak Ltd',
+   0x001B70: u'IRI Ubiteq, INC.',
+   0x001B71: u'Telular Corp.',
+   0x001B72: u'Sicep s.p.a.',
+   0x001B73: u'DTL Broadcast Ltd',
+   0x001B74: u'MiraLink Corporation',
+   0x001B75: u'Hypermedia Systems',
+   0x001B76: u'Ripcode, Inc.',
+   0x001B77: u'Intel Corporate',
+   0x001B78: u'Hewlett Packard',
+   0x001B79: u'FAIVELEY TRANSPORT',
+   0x001B7A: u'Nintendo Co., Ltd.',
+   0x001B7B: u'The Tintometer Ltd',
+   0x001B7C: u'A & R Cambridge',
+   0x001B7D: u'CXR Anderson Jacobson',
+   0x001B7E: u'Beckmann GmbH',
+   0x001B7F: u'TMN Technologies Telecomunicacoes Ltda',
+   0x001B80: u'LORD Corporation',
+   0x001B81: u'DATAQ Instruments, Inc.',
+   0x001B82: u'Taiwan Semiconductor Co., Ltd.',
+   0x001B83: u'Finsoft Ltd',
+   0x001B84: u'Scan Engineering Telecom',
+   0x001B85: u'MAN Diesel A/S',
+   0x001B86: u'Bosch Access Systems GmbH',
+   0x001B87: u'Deepsound Tech. Co., Ltd',
+   0x001B88: u'Divinet Access Technologies Ltd',
+   0x001B89: u'EMZA Visual Sense Ltd.',
+   0x001B8A: u'2M Electronic A/S',
+   0x001B8B: u'NEC AccessTechnica,Ltd.',
+   0x001B8C: u'JMicron Technology Corp.',
+   0x001B8D: u'Electronic Computer Systems, Inc.',
+   0x001B8E: u'Hulu Sweden AB',
+   0x001B8F: u'Cisco Systems',
+   0x001B90: u'Cisco Systems',
+   0x001B91: u'EFKON AG',
+   0x001B92: u'l-acoustics',
+   0x001B93: u'JC Decaux SA DNT',
+   0x001B94: u'T.E.M.A. S.p.A.',
+   0x001B95: u'VIDEO SYSTEMS SRL',
+   0x001B96: u'Snif Labs, Inc.',
+   0x001B97: u'Violin Technologies',
+   0x001B98: u'Samsung Electronics Co., Ltd.',
+   0x001B99: u'KS System GmbH',
+   0x001B9A: u'Apollo Fire Detectors Ltd',
+   0x001B9B: u'Hose-McCann Communications',
+   0x001B9C: u'SATEL sp. z o.o.',
+   0x001B9D: u'Novus Security Sp. z o.o.',
+   0x001B9E: u'ASKEY  COMPUTER  CORP',
+   0x001B9F: u'Calyptech Pty Ltd',
+   0x001BA0: u'Awox',
+   0x001BA1: u'Åmic AB',
+   0x001BA2: u'IDS Imaging Development Systems GmbH',
+   0x001BA3: u'Flexit Group GmbH',
+   0x001BA4: u'S.A.E Afikim',
+   0x001BA5: u'MyungMin Systems, Inc.',
+   0x001BA6: u'intotech inc.',
+   0x001BA7: u'Lorica Solutions',
+   0x001BA8: u'UBI&MOBI,.Inc',
+   0x001BA9: u'BROTHER INDUSTRIES, LTD. Printing & Solutions Company',
+   0x001BAA: u'XenICs nv',
+   0x001BAB: u'Telchemy, Incorporated',
+   0x001BAC: u'Curtiss Wright Controls Embedded Computing',
+   0x001BAD: u'iControl Incorporated',
+   0x001BAE: u'Micro Control Systems, Inc',
+   0x001BAF: u'Nokia Danmark A/S',
+   0x001BB0: u'BHARAT ELECTRONICS',
+   0x001BB1: u'Wistron Neweb Corp.',
+   0x001BB2: u'Intellect International NV',
+   0x001BB3: u'Condalo GmbH',
+   0x001BB4: u'Airvod Limited',
+   0x001BB5: u'Cherry GmbH',
+   0x001BB6: u'Bird Electronic Corp.',
+   0x001BB7: u'Alta Heights Technology Corp.',
+   0x001BB8: u'BLUEWAY ELECTRONIC CO;LTD',
+   0x001BB9: u'Elitegroup Computer System Co.',
+   0x001C7C: u'PERQ SYSTEMS CORPORATION',
+   0x002000: u'LEXMARK INTERNATIONAL, INC.',
+   0x002001: u'DSP SOLUTIONS, INC.',
+   0x002002: u'SERITECH ENTERPRISE CO., LTD.',
+   0x002003: u'PIXEL POWER LTD.',
+   0x002004: u'YAMATAKE-HONEYWELL CO., LTD.',
+   0x002005: u'SIMPLE TECHNOLOGY',
+   0x002006: u'GARRETT COMMUNICATIONS, INC.',
+   0x002007: u'SFA, INC.',
+   0x002008: u'CABLE & COMPUTER TECHNOLOGY',
+   0x002009: u'PACKARD BELL ELEC., INC.',
+   0x00200A: u'SOURCE-COMM CORP.',
+   0x00200B: u'OCTAGON SYSTEMS CORP.',
+   0x00200C: u'ADASTRA SYSTEMS CORP.',
+   0x00200D: u'CARL ZEISS',
+   0x00200E: u'SATELLITE TECHNOLOGY MGMT, INC',
+   0x00200F: u'TANBAC CO., LTD.',
+   0x002010: u'JEOL SYSTEM TECHNOLOGY CO. LTD',
+   0x002011: u'CANOPUS CO., LTD.',
+   0x002012: u'CAMTRONICS MEDICAL SYSTEMS',
+   0x002013: u'DIVERSIFIED TECHNOLOGY, INC.',
+   0x002014: u'GLOBAL VIEW CO., LTD.',
+   0x002015: u'ACTIS COMPUTER SA',
+   0x002016: u'SHOWA ELECTRIC WIRE & CABLE CO',
+   0x002017: u'ORBOTECH',
+   0x002018: u'CIS TECHNOLOGY INC.',
+   0x002019: u'OHLER GmbH',
+   0x00201A: u'MRV Communications, Inc.',
+   0x00201B: u'NORTHERN TELECOM/NETWORK',
+   0x00201C: u'EXCEL, INC.',
+   0x00201D: u'KATANA PRODUCTS',
+   0x00201E: u'NETQUEST CORPORATION',
+   0x00201F: u'BEST POWER TECHNOLOGY, INC.',
+   0x002020: u'MEGATRON COMPUTER INDUSTRIES PTY, LTD.',
+   0x002021: u'ALGORITHMS SOFTWARE PVT. LTD.',
+   0x002022: u'NMS Communications',
+   0x002023: u'T.C. TECHNOLOGIES PTY. LTD',
+   0x002024: u'PACIFIC COMMUNICATION SCIENCES',
+   0x002025: u'CONTROL TECHNOLOGY, INC.',
+   0x002026: u'AMKLY SYSTEMS, INC.',
+   0x002027: u'MING FORTUNE INDUSTRY CO., LTD',
+   0x002028: u'WEST EGG SYSTEMS, INC.',
+   0x002029: u'TELEPROCESSING PRODUCTS, INC.',
+   0x00202A: u'N.V. DZINE',
+   0x00202B: u'ADVANCED TELECOMMUNICATIONS MODULES, LTD.',
+   0x00202C: u'WELLTRONIX CO., LTD.',
+   0x00202D: u'TAIYO CORPORATION',
+   0x00202E: u'DAYSTAR DIGITAL',
+   0x00202F: u'ZETA COMMUNICATIONS, LTD.',
+   0x002030: u'ANALOG & DIGITAL SYSTEMS',
+   0x002031: u'ERTEC GmbH',
+   0x002032: u'ALCATEL TAISEL',
+   0x002033: u'SYNAPSE TECHNOLOGIES, INC.',
+   0x002034: u'ROTEC INDUSTRIEAUTOMATION GMBH',
+   0x002035: u'IBM CORPORATION',
+   0x002036: u'BMC SOFTWARE',
+   0x002037: u'SEAGATE TECHNOLOGY',
+   0x002038: u'VME MICROSYSTEMS INTERNATIONAL CORPORATION',
+   0x002039: u'SCINETS',
+   0x00203A: u'DIGITAL BI0METRICS INC.',
+   0x00203B: u'WISDM LTD.',
+   0x00203C: u'EUROTIME AB',
+   0x00203D: u'NOVAR ELECTRONICS CORPORATION',
+   0x00203E: u'LogiCan Technologies, Inc.',
+   0x00203F: u'JUKI CORPORATION',
+   0x002040: u'Motorola Broadband Communications Sector',
+   0x002041: u'DATA NET',
+   0x002042: u'DATAMETRICS CORP.',
+   0x002043: u'NEURON COMPANY LIMITED',
+   0x002044: u'GENITECH PTY LTD',
+   0x002045: u'ION Networks, Inc.',
+   0x002046: u'CIPRICO, INC.',
+   0x002047: u'STEINBRECHER CORP.',
+   0x002048: u'Marconi Communications',
+   0x002049: u'COMTRON, INC.',
+   0x00204A: u'PRONET GMBH',
+   0x00204B: u'AUTOCOMPUTER CO., LTD.',
+   0x00204C: u'MITRON COMPUTER PTE LTD.',
+   0x00204D: u'INOVIS GMBH',
+   0x00204E: u'NETWORK SECURITY SYSTEMS, INC.',
+   0x00204F: u'DEUTSCHE AEROSPACE AG',
+   0x002050: u'KOREA COMPUTER INC.',
+   0x002051: u'Verilink Corporation',
+   0x002052: u'RAGULA SYSTEMS',
+   0x002053: u'HUNTSVILLE MICROSYSTEMS, INC.',
+   0x002054: u'EASTERN RESEARCH, INC.',
+   0x002055: u'ALTECH CO., LTD.',
+   0x002056: u'NEOPRODUCTS',
+   0x002057: u'TITZE DATENTECHNIK GmbH',
+   0x002058: u'ALLIED SIGNAL INC.',
+   0x002059: u'MIRO COMPUTER PRODUCTS AG',
+   0x00205A: u'COMPUTER IDENTICS',
+   0x00205B: u'Kentrox, LLC',
+   0x00205C: u'InterNet Systems of Florida, Inc.',
+   0x00205D: u'NANOMATIC OY',
+   0x00205E: u'CASTLE ROCK, INC.',
+   0x00205F: u'GAMMADATA COMPUTER GMBH',
+   0x002060: u'ALCATEL ITALIA S.p.A.',
+   0x002061: u'DYNATECH COMMUNICATIONS, INC.',
+   0x002062: u'SCORPION LOGIC, LTD.',
+   0x002063: u'WIPRO INFOTECH LTD.',
+   0x002064: u'PROTEC MICROSYSTEMS, INC.',
+   0x002065: u'SUPERNET NETWORKING INC.',
+   0x002066: u'GENERAL MAGIC, INC.',
+   0x002067: u'PRIVATE',
+   0x002068: u'ISDYNE',
+   0x002069: u'ISDN SYSTEMS CORPORATION',
+   0x00206A: u'OSAKA COMPUTER CORP.',
+   0x00206B: u'KONICA MINOLTA HOLDINGS, INC.',
+   0x00206C: u'EVERGREEN TECHNOLOGY CORP.',
+   0x00206D: u'DATA RACE, INC.',
+   0x00206E: u'XACT, INC.',
+   0x00206F: u'FLOWPOINT CORPORATION',
+   0x002070: u'HYNET, LTD.',
+   0x002071: u'IBR GMBH',
+   0x002072: u'WORKLINK INNOVATIONS',
+   0x002073: u'FUSION SYSTEMS CORPORATION',
+   0x002074: u'SUNGWOON SYSTEMS',
+   0x002075: u'MOTOROLA COMMUNICATION ISRAEL',
+   0x002076: u'REUDO CORPORATION',
+   0x002077: u'KARDIOS SYSTEMS CORP.',
+   0x002078: u'RUNTOP, INC.',
+   0x002079: u'MIKRON GMBH',
+   0x00207A: u'WiSE Communications, Inc.',
+   0x00207B: u'Intel Corporation',
+   0x00207C: u'AUTEC GmbH',
+   0x00207D: u'ADVANCED COMPUTER APPLICATIONS',
+   0x00207E: u'FINECOM Co., Ltd.',
+   0x00207F: u'KYOEI SANGYO CO., LTD.',
+   0x002080: u'SYNERGY (UK) LTD.',
+   0x002081: u'TITAN ELECTRONICS',
+   0x002082: u'ONEAC CORPORATION',
+   0x002083: u'PRESTICOM INCORPORATED',
+   0x002084: u'OCE PRINTING SYSTEMS, GMBH',
+   0x002085: u'EXIDE ELECTRONICS',
+   0x002086: u'MICROTECH ELECTRONICS LIMITED',
+   0x002087: u'MEMOTEC COMMUNICATIONS CORP.',
+   0x002088: u'GLOBAL VILLAGE COMMUNICATION',
+   0x002089: u'T3PLUS NETWORKING, INC.',
+   0x00208A: u'SONIX COMMUNICATIONS, LTD.',
+   0x00208B: u'LAPIS TECHNOLOGIES, INC.',
+   0x00208C: u'GALAXY NETWORKS, INC.',
+   0x00208D: u'CMD TECHNOLOGY',
+   0x00208E: u'CHEVIN SOFTWARE ENG. LTD.',
+   0x00208F: u'ECI TELECOM LTD.',
+   0x002090: u'ADVANCED COMPRESSION TECHNOLOGY, INC.',
+   0x002091: u'J125, NATIONAL SECURITY AGENCY',
+   0x002092: u'CHESS ENGINEERING B.V.',
+   0x002093: u'LANDINGS TECHNOLOGY CORP.',
+   0x002094: u'CUBIX CORPORATION',
+   0x002095: u'RIVA ELECTRONICS',
+   0x002096: u'Invensys',
+   0x002097: u'APPLIED SIGNAL TECHNOLOGY',
+   0x002098: u'HECTRONIC AB',
+   0x002099: u'BON ELECTRIC CO., LTD.',
+   0x00209A: u'THE 3DO COMPANY',
+   0x00209B: u'ERSAT ELECTRONIC GMBH',
+   0x00209C: u'PRIMARY ACCESS CORP.',
+   0x00209D: u'LIPPERT AUTOMATIONSTECHNIK',
+   0x00209E: u'BROWN\'S OPERATING SYSTEM SERVICES, LTD.',
+   0x00209F: u'MERCURY COMPUTER SYSTEMS, INC.',
+   0x0020A0: u'OA LABORATORY CO., LTD.',
+   0x0020A1: u'DOVATRON',
+   0x0020A2: u'GALCOM NETWORKING LTD.',
+   0x0020A3: u'DIVICOM INC.',
+   0x0020A4: u'MULTIPOINT NETWORKS',
+   0x0020A5: u'API ENGINEERING',
+   0x0020A6: u'PROXIM, INC.',
+   0x0020A7: u'PAIRGAIN TECHNOLOGIES, INC.',
+   0x0020A8: u'SAST TECHNOLOGY CORP.',
+   0x0020A9: u'WHITE HORSE INDUSTRIAL',
+   0x0020AA: u'DIGIMEDIA VISION LTD.',
+   0x0020AB: u'MICRO INDUSTRIES CORP.',
+   0x0020AC: u'INTERFLEX DATENSYSTEME GMBH',
+   0x0020AD: u'LINQ SYSTEMS',
+   0x0020AE: u'ORNET DATA COMMUNICATION TECH.',
+   0x0020AF: u'3COM CORPORATION',
+   0x0020B0: u'GATEWAY DEVICES, INC.',
+   0x0020B1: u'COMTECH RESEARCH INC.',
+   0x0020B2: u'GKD Gesellschaft Fur Kommunikation Und Datentechnik',
+   0x0020B3: u'SCLTEC COMMUNICATIONS SYSTEMS',
+   0x0020B4: u'TERMA ELEKTRONIK AS',
+   0x0020B5: u'YASKAWA ELECTRIC CORPORATION',
+   0x0020B6: u'AGILE NETWORKS, INC.',
+   0x0020B7: u'NAMAQUA COMPUTERWARE',
+   0x0020B8: u'PRIME OPTION, INC.',
+   0x0020B9: u'METRICOM, INC.',
+   0x0020BA: u'CENTER FOR HIGH PERFORMANCE',
+   0x0020BB: u'ZAX CORPORATION',
+   0x0020BC: u'Long Reach Networks Pty Ltd',
+   0x0020BD: u'NIOBRARA R & D CORPORATION',
+   0x0020BE: u'LAN ACCESS CORP.',
+   0x0020BF: u'AEHR TEST SYSTEMS',
+   0x0020C0: u'PULSE ELECTRONICS, INC.',
+   0x0020C1: u'SAXA, Inc.',
+   0x0020C2: u'TEXAS MEMORY SYSTEMS, INC.',
+   0x0020C3: u'COUNTER SOLUTIONS LTD.',
+   0x0020C4: u'INET,INC.',
+   0x0020C5: u'EAGLE TECHNOLOGY',
+   0x0020C6: u'NECTEC',
+   0x0020C7: u'AKAI Professional M.I. Corp.',
+   0x0020C8: u'LARSCOM INCORPORATED',
+   0x0020C9: u'VICTRON BV',
+   0x0020CA: u'DIGITAL OCEAN',
+   0x0020CB: u'PRETEC ELECTRONICS CORP.',
+   0x0020CC: u'DIGITAL SERVICES, LTD.',
+   0x0020CD: u'HYBRID NETWORKS, INC.',
+   0x0020CE: u'LOGICAL DESIGN GROUP, INC.',
+   0x0020CF: u'TEST & MEASUREMENT SYSTEMS INC',
+   0x0020D0: u'VERSALYNX CORPORATION',
+   0x0020D1: u'MICROCOMPUTER SYSTEMS (M) SDN.',
+   0x0020D2: u'RAD DATA COMMUNICATIONS, LTD.',
+   0x0020D3: u'OST (OUEST STANDARD TELEMATIQU',
+   0x0020D4: u'CABLETRON - ZEITTNET INC.',
+   0x0020D5: u'VIPA GMBH',
+   0x0020D6: u'BREEZECOM',
+   0x0020D7: u'JAPAN MINICOMPUTER SYSTEMS CO., Ltd.',
+   0x0020D8: u'Nortel Networks',
+   0x0020D9: u'PANASONIC TECHNOLOGIES, INC./MIECO-US',
+   0x0020DA: u'Alcatel North America ESD',
+   0x0020DB: u'XNET TECHNOLOGY, INC.',
+   0x0020DC: u'DENSITRON TAIWAN LTD.',
+   0x0020DD: u'Cybertec Pty Ltd',
+   0x0020DE: u'JAPAN DIGITAL LABORAT\'Y CO.LTD',
+   0x0020DF: u'KYOSAN ELECTRIC MFG. CO., LTD.',
+   0x0020E0: u'Actiontec Electronics, Inc.',
+   0x0020E1: u'ALAMAR ELECTRONICS',
+   0x0020E2: u'INFORMATION RESOURCE ENGINEERING',
+   0x0020E3: u'MCD KENCOM CORPORATION',
+   0x0020E4: u'HSING TECH ENTERPRISE CO., LTD',
+   0x0020E5: u'APEX DATA, INC.',
+   0x0020E6: u'LIDKOPING MACHINE TOOLS AB',
+   0x0020E7: u'B&W NUCLEAR SERVICE COMPANY',
+   0x0020E8: u'DATATREK CORPORATION',
+   0x0020E9: u'DANTEL',
+   0x0020EA: u'EFFICIENT NETWORKS, INC.',
+   0x0020EB: u'CINCINNATI MICROWAVE, INC.',
+   0x0020EC: u'TECHWARE SYSTEMS CORP.',
+   0x0020ED: u'GIGA-BYTE TECHNOLOGY CO., LTD.',
+   0x0020EE: u'GTECH CORPORATION',
+   0x0020EF: u'USC CORPORATION',
+   0x0020F0: u'UNIVERSAL MICROELECTRONICS CO.',
+   0x0020F1: u'ALTOS INDIA LIMITED',
+   0x0020F2: u'SUN MICROSYSTEMS, INC.',
+   0x0020F3: u'RAYNET CORPORATION',
+   0x0020F4: u'SPECTRIX CORPORATION',
+   0x0020F5: u'PANDATEL AG',
+   0x0020F6: u'NET TEK  AND KARLNET, INC.',
+   0x0020F7: u'CYBERDATA',
+   0x0020F8: u'CARRERA COMPUTERS, INC.',
+   0x0020F9: u'PARALINK NETWORKS, INC.',
+   0x0020FA: u'GDE SYSTEMS, INC.',
+   0x0020FB: u'OCTEL COMMUNICATIONS CORP.',
+   0x0020FC: u'MATROX',
+   0x0020FD: u'ITV TECHNOLOGIES, INC.',
+   0x0020FE: u'TOPWARE INC. / GRAND COMPUTER',
+   0x0020FF: u'SYMMETRICAL TECHNOLOGIES',
+   0x002654: u'3Com Corporation',
+   0x003000: u'ALLWELL TECHNOLOGY CORP.',
+   0x003001: u'SMP',
+   0x003002: u'Expand Networks',
+   0x003003: u'Phasys Ltd.',
+   0x003004: u'LEADTEK RESEARCH INC.',
+   0x003005: u'Fujitsu Siemens Computers',
+   0x003006: u'SUPERPOWER COMPUTER',
+   0x003007: u'OPTI, INC.',
+   0x003008: u'AVIO DIGITAL, INC.',
+   0x003009: u'Tachion Networks, Inc.',
+   0x00300A: u'AZTECH SYSTEMS LTD.',
+   0x00300B: u'mPHASE Technologies, Inc.',
+   0x00300C: u'CONGRUENCY, LTD.',
+   0x00300D: u'MMC Technology, Inc.',
+   0x00300E: u'Klotz Digital AG',
+   0x00300F: u'IMT - Information Management T',
+   0x003010: u'VISIONETICS INTERNATIONAL',
+   0x003011: u'HMS FIELDBUS SYSTEMS AB',
+   0x003012: u'DIGITAL ENGINEERING LTD.',
+   0x003013: u'NEC Corporation',
+   0x003014: u'DIVIO, INC.',
+   0x003015: u'CP CLARE CORP.',
+   0x003016: u'ISHIDA CO., LTD.',
+   0x003017: u'BlueArc UK Ltd',
+   0x003018: u'Jetway Information Co., Ltd.',
+   0x003019: u'CISCO SYSTEMS, INC.',
+   0x00301A: u'SMARTBRIDGES PTE. LTD.',
+   0x00301B: u'SHUTTLE, INC.',
+   0x00301C: u'ALTVATER AIRDATA SYSTEMS',
+   0x00301D: u'SKYSTREAM, INC.',
+   0x00301E: u'3COM Europe Ltd.',
+   0x00301F: u'OPTICAL NETWORKS, INC.',
+   0x003020: u'TSI, Inc..',
+   0x003021: u'HSING TECH. ENTERPRISE CO.,LTD',
+   0x003022: u'Fong Kai Industrial Co., Ltd.',
+   0x003023: u'COGENT COMPUTER SYSTEMS, INC.',
+   0x003024: u'CISCO SYSTEMS, INC.',
+   0x003025: u'CHECKOUT COMPUTER SYSTEMS, LTD',
+   0x003026: u'HeiTel Digital Video GmbH',
+   0x003027: u'KERBANGO, INC.',
+   0x003028: u'FASE Saldatura srl',
+   0x003029: u'OPICOM',
+   0x00302A: u'SOUTHERN INFORMATION',
+   0x00302B: u'INALP NETWORKS, INC.',
+   0x00302C: u'SYLANTRO SYSTEMS CORPORATION',
+   0x00302D: u'QUANTUM BRIDGE COMMUNICATIONS',
+   0x00302E: u'Hoft & Wessel AG',
+   0x00302F: u'Smiths Industries',
+   0x003030: u'HARMONIX CORPORATION',
+   0x003031: u'LIGHTWAVE COMMUNICATIONS, INC.',
+   0x003032: u'MagicRam, Inc.',
+   0x003033: u'ORIENT TELECOM CO., LTD.',
+   0x003034: u'SET ENGINEERING',
+   0x003035: u'Corning Incorporated',
+   0x003036: u'RMP ELEKTRONIKSYSTEME GMBH',
+   0x003037: u'Packard Bell Nec Services',
+   0x003038: u'XCP, INC.',
+   0x003039: u'SOFTBOOK PRESS',
+   0x00303A: u'MAATEL',
+   0x00303B: u'PowerCom Technology',
+   0x00303C: u'ONNTO CORP.',
+   0x00303D: u'IVA CORPORATION',
+   0x00303E: u'Radcom Ltd.',
+   0x00303F: u'TurboComm Tech Inc.',
+   0x003040: u'CISCO SYSTEMS, INC.',
+   0x003041: u'SAEJIN T & M CO., LTD.',
+   0x003042: u'DeTeWe-Deutsche Telephonwerke',
+   0x003043: u'IDREAM TECHNOLOGIES, PTE. LTD.',
+   0x003044: u'Portsmith LLC',
+   0x003045: u'Village Networks, Inc. (VNI)',
+   0x003046: u'Controlled Electronic Manageme',
+   0x003047: u'NISSEI ELECTRIC CO., LTD.',
+   0x003048: u'Supermicro Computer, Inc.',
+   0x003049: u'BRYANT TECHNOLOGY, LTD.',
+   0x00304A: u'Fraunhofer IPMS',
+   0x00304B: u'ORBACOM SYSTEMS, INC.',
+   0x00304C: u'APPIAN COMMUNICATIONS, INC.',
+   0x00304D: u'ESI',
+   0x00304E: u'BUSTEC PRODUCTION LTD.',
+   0x00304F: u'PLANET Technology Corporation',
+   0x003050: u'Versa Technology',
+   0x003051: u'ORBIT AVIONIC & COMMUNICATION',
+   0x003052: u'ELASTIC NETWORKS',
+   0x003053: u'Basler AG',
+   0x003054: u'CASTLENET TECHNOLOGY, INC.',
+   0x003055: u'Hitachi Semiconductor America,',
+   0x003056: u'Beck IPC GmbH',
+   0x003057: u'QTelNet, Inc.',
+   0x003058: u'API MOTION',
+   0x003059: u'DIGITAL-LOGIC AG',
+   0x00305A: u'TELGEN CORPORATION',
+   0x00305B: u'MODULE DEPARTMENT',
+   0x00305C: u'SMAR Laboratories Corp.',
+   0x00305D: u'DIGITRA SYSTEMS, INC.',
+   0x00305E: u'Abelko Innovation',
+   0x00305F: u'IMACON APS',
+   0x003060: u'Powerfile, Inc.',
+   0x003061: u'MobyTEL',
+   0x003062: u'PATH 1 NETWORK TECHNOL\'S INC.',
+   0x003063: u'SANTERA SYSTEMS, INC.',
+   0x003064: u'ADLINK TECHNOLOGY, INC.',
+   0x003065: u'APPLE COMPUTER, INC.',
+   0x003066: u'DIGITAL WIRELESS CORPORATION',
+   0x003067: u'BIOSTAR MICROTECH INT\'L CORP.',
+   0x003068: u'CYBERNETICS TECH. CO., LTD.',
+   0x003069: u'IMPACCT TECHNOLOGY CORP.',
+   0x00306A: u'PENTA MEDIA CO., LTD.',
+   0x00306B: u'CMOS SYSTEMS, INC.',
+   0x00306C: u'Hitex Holding GmbH',
+   0x00306D: u'LUCENT TECHNOLOGIES',
+   0x00306E: u'HEWLETT PACKARD',
+   0x00306F: u'SEYEON TECH. CO., LTD.',
+   0x003070: u'1Net Corporation',
+   0x003071: u'Cisco Systems, Inc.',
+   0x003072: u'INTELLIBYTE INC.',
+   0x003073: u'International Microsystems, In',
+   0x003074: u'EQUIINET LTD.',
+   0x003075: u'ADTECH',
+   0x003076: u'Akamba Corporation',
+   0x003077: u'ONPREM NETWORKS',
+   0x003078: u'Cisco Systems, Inc.',
+   0x003079: u'CQOS, INC.',
+   0x00307A: u'Advanced Technology & Systems',
+   0x00307B: u'Cisco Systems, Inc.',
+   0x00307C: u'ADID SA',
+   0x00307D: u'GRE AMERICA, INC.',
+   0x00307E: u'Redflex Communication Systems',
+   0x00307F: u'IRLAN LTD.',
+   0x003080: u'CISCO SYSTEMS, INC.',
+   0x003081: u'ALTOS C&C',
+   0x003082: u'TAIHAN ELECTRIC WIRE CO., LTD.',
+   0x003083: u'Ivron Systems',
+   0x003084: u'ALLIED TELESYN INTERNAIONAL',
+   0x003085: u'CISCO SYSTEMS, INC.',
+   0x003086: u'Transistor Devices, Inc.',
+   0x003087: u'VEGA GRIESHABER KG',
+   0x003088: u'Siara Systems, Inc.',
+   0x003089: u'Spectrapoint Wireless, LLC',
+   0x00308A: u'NICOTRA SISTEMI S.P.A',
+   0x00308B: u'Brix Networks',
+   0x00308C: u'ADVANCED DIGITAL INFORMATION',
+   0x00308D: u'PINNACLE SYSTEMS, INC.',
+   0x00308E: u'CROSS MATCH TECHNOLOGIES, INC.',
+   0x00308F: u'MICRILOR, Inc.',
+   0x003090: u'CYRA TECHNOLOGIES, INC.',
+   0x003091: u'TAIWAN FIRST LINE ELEC. CORP.',
+   0x003092: u'ModuNORM GmbH',
+   0x003093: u'SONNET TECHNOLOGIES, INC.',
+   0x003094: u'Cisco Systems, Inc.',
+   0x003095: u'Procomp Informatics, Ltd.',
+   0x003096: u'CISCO SYSTEMS, INC.',
+   0x003097: u'EXOMATIC AB',
+   0x003098: u'Global Converging Technologies',
+   0x003099: u'BOENIG UND KALLENBACH OHG',
+   0x00309A: u'ASTRO TERRA CORP.',
+   0x00309B: u'Smartware',
+   0x00309C: u'Timing Applications, Inc.',
+   0x00309D: u'Nimble Microsystems, Inc.',
+   0x00309E: u'WORKBIT CORPORATION.',
+   0x00309F: u'AMBER NETWORKS',
+   0x0030A0: u'TYCO SUBMARINE SYSTEMS, LTD.',
+   0x0030A1: u'WEBGATE Inc.',
+   0x0030A2: u'Lightner Engineering',
+   0x0030A3: u'CISCO SYSTEMS, INC.',
+   0x0030A4: u'Woodwind Communications System',
+   0x0030A5: u'ACTIVE POWER',
+   0x0030A6: u'VIANET TECHNOLOGIES, LTD.',
+   0x0030A7: u'SCHWEITZER ENGINEERING',
+   0x0030A8: u'OL\'E COMMUNICATIONS, INC.',
+   0x0030A9: u'Netiverse, Inc.',
+   0x0030AA: u'AXUS MICROSYSTEMS, INC.',
+   0x0030AB: u'DELTA NETWORKS, INC.',
+   0x0030AC: u'Systeme Lauer GmbH & Co., Ltd.',
+   0x0030AD: u'SHANGHAI COMMUNICATION',
+   0x0030AE: u'Times N System, Inc.',
+   0x0030AF: u'Honeywell GmbH',
+   0x0030B0: u'Convergenet Technologies',
+   0x0030B1: u'aXess-pro networks GmbH',
+   0x0030B2: u'L-3 Sonoma EO',
+   0x0030B3: u'San Valley Systems, Inc.',
+   0x0030B4: u'INTERSIL CORP.',
+   0x0030B5: u'Tadiran Microwave Networks',
+   0x0030B6: u'CISCO SYSTEMS, INC.',
+   0x0030B7: u'Teletrol Systems, Inc.',
+   0x0030B8: u'RiverDelta Networks',
+   0x0030B9: u'ECTEL',
+   0x0030BA: u'AC&T SYSTEM CO., LTD.',
+   0x0030BB: u'CacheFlow, Inc.',
+   0x0030BC: u'Optronic AG',
+   0x0030BD: u'BELKIN COMPONENTS',
+   0x0030BE: u'City-Net Technology, Inc.',
+   0x0030BF: u'MULTIDATA GMBH',
+   0x0030C0: u'Lara Technology, Inc.',
+   0x0030C1: u'HEWLETT-PACKARD',
+   0x0030C2: u'COMONE',
+   0x0030C3: u'FLUECKIGER ELEKTRONIK AG',
+   0x0030C4: u'Canon Imaging System Technologies, Inc.',
+   0x0030C5: u'CADENCE DESIGN SYSTEMS',
+   0x0030C6: u'CONTROL SOLUTIONS, INC.',
+   0x0030C7: u'MACROMATE CORP.',
+   0x0030C8: u'GAD LINE, LTD.',
+   0x0030C9: u'LuxN, N',
+   0x0030CA: u'Discovery Com',
+   0x0030CB: u'OMNI FLOW COMPUTERS, INC.',
+   0x0030CC: u'Tenor Networks, Inc.',
+   0x0030CD: u'CONEXANT SYSTEMS, INC.',
+   0x0030CE: u'Zaffire',
+   0x0030CF: u'TWO TECHNOLOGIES, INC.',
+   0x0030D0: u'Tellabs',
+   0x0030D1: u'INOVA CORPORATION',
+   0x0030D2: u'WIN TECHNOLOGIES, CO., LTD.',
+   0x0030D3: u'Agilent Technologies',
+   0x0030D4: u'AAE Systems, Inc',
+   0x0030D5: u'DResearch GmbH',
+   0x0030D6: u'MSC VERTRIEBS GMBH',
+   0x0030D7: u'Innovative Systems, L.L.C.',
+   0x0030D8: u'SITEK',
+   0x0030D9: u'DATACORE SOFTWARE CORP.',
+   0x0030DA: u'COMTREND CO.',
+   0x0030DB: u'Mindready Solutions, Inc.',
+   0x0030DC: u'RIGHTECH CORPORATION',
+   0x0030DD: u'INDIGITA CORPORATION',
+   0x0030DE: u'WAGO Kontakttechnik GmbH',
+   0x0030DF: u'KB/TEL TELECOMUNICACIONES',
+   0x0030E0: u'OXFORD SEMICONDUCTOR LTD.',
+   0x0030E1: u'ACROTRON SYSTEMS, INC.',
+   0x0030E2: u'GARNET SYSTEMS CO., LTD.',
+   0x0030E3: u'SEDONA NETWORKS CORP.',
+   0x0030E4: u'CHIYODA SYSTEM RIKEN',
+   0x0030E5: u'Amper Datos S.A.',
+   0x0030E6: u'Draeger Medical Systems, Inc.',
+   0x0030E7: u'CNF MOBILE SOLUTIONS, INC.',
+   0x0030E8: u'ENSIM CORP.',
+   0x0030E9: u'GMA COMMUNICATION MANUFACT\'G',
+   0x0030EA: u'TeraForce Technology Corporation',
+   0x0030EB: u'TURBONET COMMUNICATIONS, INC.',
+   0x0030EC: u'BORGARDT',
+   0x0030ED: u'Expert Magnetics Corp.',
+   0x0030EE: u'DSG Technology, Inc.',
+   0x0030EF: u'NEON TECHNOLOGY, INC.',
+   0x0030F0: u'Uniform Industrial Corp.',
+   0x0030F1: u'Accton Technology Corp.',
+   0x0030F2: u'CISCO SYSTEMS, INC.',
+   0x0030F3: u'At Work Computers',
+   0x0030F4: u'STARDOT TECHNOLOGIES',
+   0x0030F5: u'Wild Lab. Ltd.',
+   0x0030F6: u'SECURELOGIX CORPORATION',
+   0x0030F7: u'RAMIX INC.',
+   0x0030F8: u'Dynapro Systems, Inc.',
+   0x0030F9: u'Sollae Systems Co., Ltd.',
+   0x0030FA: u'TELICA, INC.',
+   0x0030FB: u'AZS Technology AG',
+   0x0030FC: u'Terawave Communications, Inc.',
+   0x0030FD: u'INTEGRATED SYSTEMS DESIGN',
+   0x0030FE: u'DSA GmbH',
+   0x0030FF: u'DATAFAB SYSTEMS, INC.',
+   0x004000: u'PCI COMPONENTES DA AMZONIA LTD',
+   0x004001: u'ZYXEL COMMUNICATIONS, INC.',
+   0x004002: u'PERLE SYSTEMS LIMITED',
+   0x004003: u'Emerson Process Management Power & Water Solutions, Inc.',
+   0x004004: u'ICM CO. LTD.',
+   0x004005: u'ANI COMMUNICATIONS INC.',
+   0x004006: u'SAMPO TECHNOLOGY CORPORATION',
+   0x004007: u'TELMAT INFORMATIQUE',
+   0x004008: u'A PLUS INFO CORPORATION',
+   0x004009: u'TACHIBANA TECTRON CO., LTD.',
+   0x00400A: u'PIVOTAL TECHNOLOGIES, INC.',
+   0x00400B: u'CISCO SYSTEMS, INC.',
+   0x00400C: u'GENERAL MICRO SYSTEMS, INC.',
+   0x00400D: u'LANNET DATA COMMUNICATIONS,LTD',
+   0x00400E: u'MEMOTEC COMMUNICATIONS, INC.',
+   0x00400F: u'DATACOM TECHNOLOGIES',
+   0x004010: u'SONIC SYSTEMS, INC.',
+   0x004011: u'ANDOVER CONTROLS CORPORATION',
+   0x004012: u'WINDATA, INC.',
+   0x004013: u'NTT DATA COMM. SYSTEMS CORP.',
+   0x004014: u'COMSOFT GMBH',
+   0x004015: u'ASCOM INFRASYS AG',
+   0x004016: u'HADAX ELECTRONICS, INC.',
+   0x004017: u'Silex Technology America',
+   0x004018: u'ADOBE SYSTEMS, INC.',
+   0x004019: u'AEON SYSTEMS, INC.',
+   0x00401A: u'FUJI ELECTRIC CO., LTD.',
+   0x00401B: u'PRINTER SYSTEMS CORP.',
+   0x00401C: u'AST RESEARCH, INC.',
+   0x00401D: u'INVISIBLE SOFTWARE, INC.',
+   0x00401E: u'ICC',
+   0x00401F: u'COLORGRAPH LTD',
+   0x004020: u'PINACL COMMUNICATION',
+   0x004021: u'RASTER GRAPHICS',
+   0x004022: u'KLEVER COMPUTERS, INC.',
+   0x004023: u'LOGIC CORPORATION',
+   0x004024: u'COMPAC INC.',
+   0x004025: u'MOLECULAR DYNAMICS',
+   0x004026: u'MELCO, INC.',
+   0x004027: u'SMC MASSACHUSETTS, INC.',
+   0x004028: u'NETCOMM LIMITED',
+   0x004029: u'COMPEX',
+   0x00402A: u'CANOGA-PERKINS',
+   0x00402B: u'TRIGEM COMPUTER, INC.',
+   0x00402C: u'ISIS DISTRIBUTED SYSTEMS, INC.',
+   0x00402D: u'HARRIS ADACOM CORPORATION',
+   0x00402E: u'PRECISION SOFTWARE, INC.',
+   0x00402F: u'XLNT DESIGNS INC.',
+   0x004030: u'GK COMPUTER',
+   0x004031: u'KOKUSAI ELECTRIC CO., LTD',
+   0x004032: u'DIGITAL COMMUNICATIONS',
+   0x004033: u'ADDTRON TECHNOLOGY CO., LTD.',
+   0x004034: u'BUSTEK CORPORATION',
+   0x004035: u'OPCOM',
+   0x004036: u'TRIBE COMPUTER WORKS, INC.',
+   0x004037: u'SEA-ILAN, INC.',
+   0x004038: u'TALENT ELECTRIC INCORPORATED',
+   0x004039: u'OPTEC DAIICHI DENKO CO., LTD.',
+   0x00403A: u'IMPACT TECHNOLOGIES',
+   0x00403B: u'SYNERJET INTERNATIONAL CORP.',
+   0x00403C: u'FORKS, INC.',
+   0x00403D: u'TERADATA',
+   0x00403E: u'RASTER OPS CORPORATION',
+   0x00403F: u'SSANGYONG COMPUTER SYSTEMS',
+   0x004040: u'RING ACCESS, INC.',
+   0x004041: u'FUJIKURA LTD.',
+   0x004042: u'N.A.T. GMBH',
+   0x004043: u'NOKIA TELECOMMUNICATIONS',
+   0x004044: u'QNIX COMPUTER CO., LTD.',
+   0x004045: u'TWINHEAD CORPORATION',
+   0x004046: u'UDC RESEARCH LIMITED',
+   0x004047: u'WIND RIVER SYSTEMS',
+   0x004048: u'SMD INFORMATICA S.A.',
+   0x004049: u'TEGIMENTA AG',
+   0x00404A: u'WEST AUSTRALIAN DEPARTMENT',
+   0x00404B: u'MAPLE COMPUTER SYSTEMS',
+   0x00404C: u'HYPERTEC PTY LTD.',
+   0x00404D: u'TELECOMMUNICATIONS TECHNIQUES',
+   0x00404E: u'FLUENT, INC.',
+   0x00404F: u'SPACE & NAVAL WARFARE SYSTEMS',
+   0x004050: u'IRONICS, INCORPORATED',
+   0x004051: u'GRACILIS, INC.',
+   0x004052: u'STAR TECHNOLOGIES, INC.',
+   0x004053: u'AMPRO COMPUTERS',
+   0x004054: u'CONNECTION MACHINES SERVICES',
+   0x004055: u'METRONIX GMBH',
+   0x004056: u'MCM JAPAN LTD.',
+   0x004057: u'LOCKHEED - SANDERS',
+   0x004058: u'KRONOS, INC.',
+   0x004059: u'YOSHIDA KOGYO K. K.',
+   0x00405A: u'GOLDSTAR INFORMATION & COMM.',
+   0x00405B: u'FUNASSET LIMITED',
+   0x00405C: u'FUTURE SYSTEMS, INC.',
+   0x00405D: u'STAR-TEK, INC.',
+   0x00405E: u'NORTH HILLS ISRAEL',
+   0x00405F: u'AFE COMPUTERS LTD.',
+   0x004060: u'COMENDEC LTD',
+   0x004061: u'DATATECH ENTERPRISES CO., LTD.',
+   0x004062: u'E-SYSTEMS, INC./GARLAND DIV.',
+   0x004063: u'VIA TECHNOLOGIES, INC.',
+   0x004064: u'KLA INSTRUMENTS CORPORATION',
+   0x004065: u'GTE SPACENET',
+   0x004066: u'HITACHI CABLE, LTD.',
+   0x004067: u'OMNIBYTE CORPORATION',
+   0x004068: u'EXTENDED SYSTEMS',
+   0x004069: u'LEMCOM SYSTEMS, INC.',
+   0x00406A: u'KENTEK INFORMATION SYSTEMS,INC',
+   0x00406B: u'SYSGEN',
+   0x00406C: u'COPERNIQUE',
+   0x00406D: u'LANCO, INC.',
+   0x00406E: u'COROLLARY, INC.',
+   0x00406F: u'SYNC RESEARCH INC.',
+   0x004070: u'INTERWARE CO., LTD.',
+   0x004071: u'ATM COMPUTER GMBH',
+   0x004072: u'Applied Innovation Inc.',
+   0x004073: u'BASS ASSOCIATES',
+   0x004074: u'CABLE AND WIRELESS',
+   0x004075: u'M-TRADE (UK) LTD',
+   0x004076: u'Sun Conversion Technologies',
+   0x004077: u'MAXTON TECHNOLOGY CORPORATION',
+   0x004078: u'WEARNES AUTOMATION PTE LTD',
+   0x004079: u'JUKO MANUFACTURE COMPANY, LTD.',
+   0x00407A: u'SOCIETE D\'EXPLOITATION DU CNIT',
+   0x00407B: u'SCIENTIFIC ATLANTA',
+   0x00407C: u'QUME CORPORATION',
+   0x00407D: u'EXTENSION TECHNOLOGY CORP.',
+   0x00407E: u'EVERGREEN SYSTEMS, INC.',
+   0x00407F: u'FLIR Systems',
+   0x004080: u'ATHENIX CORPORATION',
+   0x004081: u'MANNESMANN SCANGRAPHIC GMBH',
+   0x004082: u'LABORATORY EQUIPMENT CORP.',
+   0x004083: u'TDA INDUSTRIA DE PRODUTOS',
+   0x004084: u'HONEYWELL INC.',
+   0x004085: u'SAAB INSTRUMENTS AB',
+   0x004086: u'MICHELS & KLEBERHOFF COMPUTER',
+   0x004087: u'UBITREX CORPORATION',
+   0x004088: u'MOBIUS TECHNOLOGIES, INC.',
+   0x004089: u'MEIDENSHA CORPORATION',
+   0x00408A: u'TPS TELEPROCESSING SYS. GMBH',
+   0x00408B: u'RAYLAN CORPORATION',
+   0x00408C: u'AXIS COMMUNICATIONS AB',
+   0x00408D: u'THE GOODYEAR TIRE & RUBBER CO.',
+   0x00408E: u'DIGILOG, INC.',
+   0x00408F: u'WM-DATA MINFO AB',
+   0x004090: u'ANSEL COMMUNICATIONS',
+   0x004091: u'PROCOMP INDUSTRIA ELETRONICA',
+   0x004092: u'ASP COMPUTER PRODUCTS, INC.',
+   0x004093: u'PAXDATA NETWORKS LTD.',
+   0x004094: u'SHOGRAPHICS, INC.',
+   0x004095: u'R.P.T. INTERGROUPS INT\'L LTD.',
+   0x004096: u'Cisco Systems, Inc.',
+   0x004097: u'DATEX DIVISION OF',
+   0x004098: u'DRESSLER GMBH & CO.',
+   0x004099: u'NEWGEN SYSTEMS CORP.',
+   0x00409A: u'NETWORK EXPRESS, INC.',
+   0x00409B: u'HAL COMPUTER SYSTEMS INC.',
+   0x00409C: u'TRANSWARE',
+   0x00409D: u'DIGIBOARD, INC.',
+   0x00409E: u'CONCURRENT TECHNOLOGIES  LTD.',
+   0x00409F: u'LANCAST/CASAT TECHNOLOGY, INC.',
+   0x0040A0: u'GOLDSTAR CO., LTD.',
+   0x0040A1: u'ERGO COMPUTING',
+   0x0040A2: u'KINGSTAR TECHNOLOGY INC.',
+   0x0040A3: u'MICROUNITY SYSTEMS ENGINEERING',
+   0x0040A4: u'ROSE ELECTRONICS',
+   0x0040A5: u'CLINICOMP INTL.',
+   0x0040A6: u'Cray, Inc.',
+   0x0040A7: u'ITAUTEC PHILCO S.A.',
+   0x0040A8: u'IMF INTERNATIONAL LTD.',
+   0x0040A9: u'DATACOM INC.',
+   0x0040AA: u'VALMET AUTOMATION INC.',
+   0x0040AB: u'ROLAND DG CORPORATION',
+   0x0040AC: u'SUPER WORKSTATION, INC.',
+   0x0040AD: u'SMA REGELSYSTEME GMBH',
+   0x0040AE: u'DELTA CONTROLS, INC.',
+   0x0040AF: u'DIGITAL PRODUCTS, INC.',
+   0x0040B0: u'BYTEX CORPORATION, ENGINEERING',
+   0x0040B1: u'CODONICS INC.',
+   0x0040B2: u'SYSTEMFORSCHUNG',
+   0x0040B3: u'PAR MICROSYSTEMS CORPORATION',
+   0x0040B4: u'NEXTCOM K.K.',
+   0x0040B5: u'VIDEO TECHNOLOGY COMPUTERS LTD',
+   0x0040B6: u'COMPUTERM  CORPORATION',
+   0x0040B7: u'STEALTH COMPUTER SYSTEMS',
+   0x0040B8: u'IDEA ASSOCIATES',
+   0x0040B9: u'MACQ ELECTRONIQUE SA',
+   0x0040BA: u'ALLIANT COMPUTER SYSTEMS CORP.',
+   0x0040BB: u'GOLDSTAR CABLE CO., LTD.',
+   0x0040BC: u'ALGORITHMICS LTD.',
+   0x0040BD: u'STARLIGHT NETWORKS, INC.',
+   0x0040BE: u'BOEING DEFENSE & SPACE',
+   0x0040BF: u'CHANNEL SYSTEMS INTERN\'L INC.',
+   0x0040C0: u'VISTA CONTROLS CORPORATION',
+   0x0040C1: u'BIZERBA-WERKE WILHEIM KRAUT',
+   0x0040C2: u'APPLIED COMPUTING DEVICES',
+   0x0040C3: u'FISCHER AND PORTER CO.',
+   0x0040C4: u'KINKEI SYSTEM CORPORATION',
+   0x0040C5: u'MICOM COMMUNICATIONS INC.',
+   0x0040C6: u'FIBERNET RESEARCH, INC.',
+   0x0040C7: u'RUBY TECH CORPORATION',
+   0x0040C8: u'MILAN TECHNOLOGY CORPORATION',
+   0x0040C9: u'NCUBE',
+   0x0040CA: u'FIRST INTERNAT\'L COMPUTER, INC',
+   0x0040CB: u'LANWAN TECHNOLOGIES',
+   0x0040CC: u'SILCOM MANUF\'G TECHNOLOGY INC.',
+   0x0040CD: u'TERA MICROSYSTEMS, INC.',
+   0x0040CE: u'NET-SOURCE, INC.',
+   0x0040CF: u'STRAWBERRY TREE, INC.',
+   0x0040D0: u'MITAC INTERNATIONAL CORP.',
+   0x0040D1: u'FUKUDA DENSHI CO., LTD.',
+   0x0040D2: u'PAGINE CORPORATION',
+   0x0040D3: u'KIMPSION INTERNATIONAL CORP.',
+   0x0040D4: u'GAGE TALKER CORP.',
+   0x0040D5: u'SARTORIUS AG',
+   0x0040D6: u'LOCAMATION B.V.',
+   0x0040D7: u'STUDIO GEN INC.',
+   0x0040D8: u'OCEAN OFFICE AUTOMATION LTD.',
+   0x0040D9: u'AMERICAN MEGATRENDS INC.',
+   0x0040DA: u'TELSPEC LTD',
+   0x0040DB: u'ADVANCED TECHNICAL SOLUTIONS',
+   0x0040DC: u'TRITEC ELECTRONIC GMBH',
+   0x0040DD: u'HONG TECHNOLOGIES',
+   0x0040DE: u'ELETTRONICA SAN GIORGIO',
+   0x0040DF: u'DIGALOG SYSTEMS, INC.',
+   0x0040E0: u'ATOMWIDE LTD.',
+   0x0040E1: u'MARNER INTERNATIONAL, INC.',
+   0x0040E2: u'MESA RIDGE TECHNOLOGIES, INC.',
+   0x0040E3: u'QUIN SYSTEMS LTD',
+   0x0040E4: u'E-M TECHNOLOGY, INC.',
+   0x0040E5: u'SYBUS CORPORATION',
+   0x0040E6: u'C.A.E.N.',
+   0x0040E7: u'ARNOS INSTRUMENTS & COMPUTER',
+   0x0040E8: u'CHARLES RIVER DATA SYSTEMS,INC',
+   0x0040E9: u'ACCORD SYSTEMS, INC.',
+   0x0040EA: u'PLAIN TREE SYSTEMS INC',
+   0x0040EB: u'MARTIN MARIETTA CORPORATION',
+   0x0040EC: u'MIKASA SYSTEM ENGINEERING',
+   0x0040ED: u'NETWORK CONTROLS INT\'NATL INC.',
+   0x0040EE: u'OPTIMEM',
+   0x0040EF: u'HYPERCOM, INC.',
+   0x0040F0: u'MICRO SYSTEMS, INC.',
+   0x0040F1: u'CHUO ELECTRONICS CO., LTD.',
+   0x0040F2: u'JANICH & KLASS COMPUTERTECHNIK',
+   0x0040F3: u'NETCOR',
+   0x0040F4: u'CAMEO COMMUNICATIONS, INC.',
+   0x0040F5: u'OEM ENGINES',
+   0x0040F6: u'KATRON COMPUTERS INC.',
+   0x0040F7: u'POLAROID MEDICAL IMAGING SYS.',
+   0x0040F8: u'SYSTEMHAUS DISCOM',
+   0x0040F9: u'COMBINET',
+   0x0040FA: u'MICROBOARDS, INC.',
+   0x0040FB: u'CASCADE COMMUNICATIONS CORP.',
+   0x0040FC: u'IBR COMPUTER TECHNIK GMBH',
+   0x0040FD: u'LXE',
+   0x0040FE: u'SYMPLEX COMMUNICATIONS',
+   0x0040FF: u'TELEBIT CORPORATION',
+   0x004252: u'RLX Technologies',
+   0x004501: u'Versus Technology, Inc.',
+   0x005000: u'NEXO COMMUNICATIONS, INC.',
+   0x005001: u'YAMASHITA SYSTEMS CORP.',
+   0x005002: u'OMNISEC AG',
+   0x005003: u'GRETAG MACBETH AG',
+   0x005004: u'3COM CORPORATION',
+   0x005006: u'TAC AB',
+   0x005007: u'SIEMENS TELECOMMUNICATION SYSTEMS LIMITED',
+   0x005008: u'TIVA MICROCOMPUTER CORP. (TMC)',
+   0x005009: u'PHILIPS BROADBAND NETWORKS',
+   0x00500A: u'IRIS TECHNOLOGIES, INC.',
+   0x00500B: u'CISCO SYSTEMS, INC.',
+   0x00500C: u'e-Tek Labs, Inc.',
+   0x00500D: u'SATORI ELECTORIC CO., LTD.',
+   0x00500E: u'CHROMATIS NETWORKS, INC.',
+   0x00500F: u'CISCO SYSTEMS, INC.',
+   0x005010: u'NovaNET Learning, Inc.',
+   0x005012: u'CBL - GMBH',
+   0x005013: u'Chaparral Network Storage',
+   0x005014: u'CISCO SYSTEMS, INC.',
+   0x005015: u'BRIGHT STAR ENGINEERING',
+   0x005016: u'SST/WOODHEAD INDUSTRIES',
+   0x005017: u'RSR S.R.L.',
+   0x005018: u'AMIT, Inc.',
+   0x005019: u'SPRING TIDE NETWORKS, INC.',
+   0x00501A: u'UISIQN',
+   0x00501B: u'ABL CANADA, INC.',
+   0x00501C: u'JATOM SYSTEMS, INC.',
+   0x00501E: u'Miranda Technologies, Inc.',
+   0x00501F: u'MRG SYSTEMS, LTD.',
+   0x005020: u'MEDIASTAR CO., LTD.',
+   0x005021: u'EIS INTERNATIONAL, INC.',
+   0x005022: u'ZONET TECHNOLOGY, INC.',
+   0x005023: u'PG DESIGN ELECTRONICS, INC.',
+   0x005024: u'NAVIC SYSTEMS, INC.',
+   0x005026: u'COSYSTEMS, INC.',
+   0x005027: u'GENICOM CORPORATION',
+   0x005028: u'AVAL COMMUNICATIONS',
+   0x005029: u'1394 PRINTER WORKING GROUP',
+   0x00502A: u'CISCO SYSTEMS, INC.',
+   0x00502B: u'GENRAD LTD.',
+   0x00502C: u'SOYO COMPUTER, INC.',
+   0x00502D: u'ACCEL, INC.',
+   0x00502E: u'CAMBEX CORPORATION',
+   0x00502F: u'TollBridge Technologies, Inc.',
+   0x005030: u'FUTURE PLUS SYSTEMS',
+   0x005031: u'AEROFLEX LABORATORIES, INC.',
+   0x005032: u'PICAZO COMMUNICATIONS, INC.',
+   0x005033: u'MAYAN NETWORKS',
+   0x005036: u'NETCAM, LTD.',
+   0x005037: u'KOGA ELECTRONICS CO.',
+   0x005038: u'DAIN TELECOM CO., LTD.',
+   0x005039: u'MARINER NETWORKS',
+   0x00503A: u'DATONG ELECTRONICS LTD.',
+   0x00503B: u'MEDIAFIRE CORPORATION',
+   0x00503C: u'TSINGHUA NOVEL ELECTRONICS',
+   0x00503E: u'CISCO SYSTEMS, INC.',
+   0x00503F: u'ANCHOR GAMES',
+   0x005040: u'Matsushita Electric Works, Ltd.',
+   0x005041: u'Coretronic Corporation',
+   0x005042: u'SCI MANUFACTURING SINGAPORE PTE, LTD.',
+   0x005043: u'MARVELL SEMICONDUCTOR, INC.',
+   0x005044: u'ASACA CORPORATION',
+   0x005045: u'RIOWORKS SOLUTIONS, INC.',
+   0x005046: u'MENICX INTERNATIONAL CO., LTD.',
+   0x005047: u'PRIVATE',
+   0x005048: u'INFOLIBRIA',
+   0x005049: u'ELLACOYA NETWORKS, INC.',
+   0x00504A: u'ELTECO A.S.',
+   0x00504B: u'BARCONET N.V.',
+   0x00504C: u'GALIL MOTION CONTROL, INC.',
+   0x00504D: u'TOKYO ELECTRON DEVICE LTD.',
+   0x00504E: u'SIERRA MONITOR CORP.',
+   0x00504F: u'OLENCOM ELECTRONICS',
+   0x005050: u'CISCO SYSTEMS, INC.',
+   0x005051: u'IWATSU ELECTRIC CO., LTD.',
+   0x005052: u'TIARA NETWORKS, INC.',
+   0x005053: u'CISCO SYSTEMS, INC.',
+   0x005054: u'CISCO SYSTEMS, INC.',
+   0x005055: u'DOMS A/S',
+   0x005056: u'VMWare, Inc.',
+   0x005057: u'BROADBAND ACCESS SYSTEMS',
+   0x005058: u'VegaStream Limted',
+   0x005059: u'iBAHN',
+   0x00505A: u'NETWORK ALCHEMY, INC.',
+   0x00505B: u'KAWASAKI LSI U.S.A., INC.',
+   0x00505C: u'TUNDO CORPORATION',
+   0x00505E: u'DIGITEK MICROLOGIC S.A.',
+   0x00505F: u'BRAND INNOVATORS',
+   0x005060: u'TANDBERG TELECOM AS',
+   0x005062: u'KOUWELL ELECTRONICS CORP.  **',
+   0x005063: u'OY COMSEL SYSTEM AB',
+   0x005064: u'CAE ELECTRONICS',
+   0x005065: u'DENSEI-LAMBAD Co., Ltd.',
+   0x005066: u'AtecoM GmbH advanced telecomunication modules',
+   0x005067: u'AEROCOMM, INC.',
+   0x005068: u'ELECTRONIC INDUSTRIES ASSOCIATION',
+   0x005069: u'PixStream Incorporated',
+   0x00506A: u'EDEVA, INC.',
+   0x00506B: u'SPX-ATEG',
+   0x00506C: u'G & L BEIJER ELECTRONICS AB',
+   0x00506D: u'VIDEOJET SYSTEMS',
+   0x00506E: u'CORDER ENGINEERING CORPORATION',
+   0x00506F: u'G-CONNECT',
+   0x005070: u'CHAINTECH COMPUTER CO., LTD.',
+   0x005071: u'AIWA CO., LTD.',
+   0x005072: u'CORVIS CORPORATION',
+   0x005073: u'CISCO SYSTEMS, INC.',
+   0x005074: u'ADVANCED HI-TECH CORP.',
+   0x005075: u'KESTREL SOLUTIONS',
+   0x005076: u'IBM',
+   0x005077: u'PROLIFIC TECHNOLOGY, INC.',
+   0x005078: u'MEGATON HOUSE, LTD.',
+   0x005079: u'PRIVATE',
+   0x00507A: u'XPEED, INC.',
+   0x00507B: u'MERLOT COMMUNICATIONS',
+   0x00507C: u'VIDEOCON AG',
+   0x00507D: u'IFP',
+   0x00507E: u'NEWER TECHNOLOGY',
+   0x00507F: u'DrayTek Corp.',
+   0x005080: u'CISCO SYSTEMS, INC.',
+   0x005081: u'MURATA MACHINERY, LTD.',
+   0x005082: u'FORESSON CORPORATION',
+   0x005083: u'GILBARCO, INC.',
+   0x005084: u'ATL PRODUCTS',
+   0x005086: u'TELKOM SA, LTD.',
+   0x005087: u'TERASAKI ELECTRIC CO., LTD.',
+   0x005088: u'AMANO CORPORATION',
+   0x005089: u'SAFETY MANAGEMENT SYSTEMS',
+   0x00508B: u'COMPAQ COMPUTER CORPORATION',
+   0x00508C: u'RSI SYSTEMS',
+   0x00508D: u'ABIT COMPUTER CORPORATION',
+   0x00508E: u'OPTIMATION, INC.',
+   0x00508F: u'ASITA TECHNOLOGIES INT\'L LTD.',
+   0x005090: u'DCTRI',
+   0x005091: u'NETACCESS, INC.',
+   0x005092: u'RIGAKU INDUSTRIAL CORPORATION',
+   0x005093: u'BOEING',
+   0x005094: u'PACE MICRO TECHNOLOGY PLC',
+   0x005095: u'PERACOM NETWORKS',
+   0x005096: u'SALIX TECHNOLOGIES, INC.',
+   0x005097: u'MMC-EMBEDDED COMPUTERTECHNIK GmbH',
+   0x005098: u'GLOBALOOP, LTD.',
+   0x005099: u'3COM EUROPE, LTD.',
+   0x00509A: u'TAG ELECTRONIC SYSTEMS',
+   0x00509B: u'SWITCHCORE AB',
+   0x00509C: u'BETA RESEARCH',
+   0x00509D: u'THE INDUSTREE B.V.',
+   0x00509E: u'Les Technologies SoftAcoustik Inc.',
+   0x00509F: u'HORIZON COMPUTER',
+   0x0050A0: u'DELTA COMPUTER SYSTEMS, INC.',
+   0x0050A1: u'CARLO GAVAZZI, INC.',
+   0x0050A2: u'CISCO SYSTEMS, INC.',
+   0x0050A3: u'TransMedia Communications, Inc.',
+   0x0050A4: u'IO TECH, INC.',
+   0x0050A5: u'CAPITOL BUSINESS SYSTEMS, LTD.',
+   0x0050A6: u'OPTRONICS',
+   0x0050A7: u'CISCO SYSTEMS, INC.',
+   0x0050A8: u'OpenCon Systems, Inc.',
+   0x0050A9: u'MOLDAT WIRELESS TECHNOLGIES',
+   0x0050AA: u'KONICA MINOLTA HOLDINGS, INC.',
+   0x0050AB: u'NALTEC, INC.',
+   0x0050AC: u'MAPLE COMPUTER CORPORATION',
+   0x0050AD: u'CommUnique Wireless Corp.',
+   0x0050AE: u'IWAKI ELECTRONICS CO., LTD.',
+   0x0050AF: u'INTERGON, INC.',
+   0x0050B0: u'TECHNOLOGY ATLANTA CORPORATION',
+   0x0050B1: u'GIDDINGS & LEWIS',
+   0x0050B2: u'BRODEL AUTOMATION',
+   0x0050B3: u'VOICEBOARD CORPORATION',
+   0x0050B4: u'SATCHWELL CONTROL SYSTEMS, LTD',
+   0x0050B5: u'FICHET-BAUCHE',
+   0x0050B6: u'GOOD WAY IND. CO., LTD.',
+   0x0050B7: u'BOSER TECHNOLOGY CO., LTD.',
+   0x0050B8: u'INOVA COMPUTERS GMBH & CO. KG',
+   0x0050B9: u'XITRON TECHNOLOGIES, INC.',
+   0x0050BA: u'D-LINK',
+   0x0050BB: u'CMS TECHNOLOGIES',
+   0x0050BC: u'HAMMER STORAGE SOLUTIONS',
+   0x0050BD: u'CISCO SYSTEMS, INC.',
+   0x0050BE: u'FAST MULTIMEDIA AG',
+   0x0050BF: u'MOTOTECH INC.',
+   0x0050C0: u'GATAN, INC.',
+   0x0050C1: u'GEMFLEX NETWORKS, LTD.',
+   0x0050C2: u'IEEE REGISTRATION AUTHORITY',
+   0x0050C4: u'IMD',
+   0x0050C5: u'ADS TECHNOLOGIES, INC.',
+   0x0050C6: u'LOOP TELECOMMUNICATION INTERNATIONAL, INC.',
+   0x0050C8: u'ADDONICS COMMUNICATIONS, INC.',
+   0x0050C9: u'MASPRO DENKOH CORP.',
+   0x0050CA: u'NET TO NET TECHNOLOGIES',
+   0x0050CB: u'JETTER',
+   0x0050CC: u'XYRATEX',
+   0x0050CD: u'DIGIANSWER A/S',
+   0x0050CE: u'LG INTERNATIONAL CORP.',
+   0x0050CF: u'VANLINK COMMUNICATION TECHNOLOGY RESEARCH INSTITUTE',
+   0x0050D0: u'MINERVA SYSTEMS',
+   0x0050D1: u'CISCO SYSTEMS, INC.',
+   0x0050D2: u'CMC Electronics Inc',
+   0x0050D3: u'DIGITAL AUDIO PROCESSING PTY. LTD.',
+   0x0050D4: u'JOOHONG INFORMATION &',
+   0x0050D5: u'AD SYSTEMS CORP.',
+   0x0050D6: u'ATLAS COPCO TOOLS AB',
+   0x0050D7: u'TELSTRAT',
+   0x0050D8: u'UNICORN COMPUTER CORP.',
+   0x0050D9: u'ENGETRON-ENGENHARIA ELETRONICA IND. e COM. LTDA',
+   0x0050DA: u'3COM CORPORATION',
+   0x0050DB: u'CONTEMPORARY CONTROL',
+   0x0050DC: u'TAS TELEFONBAU A. SCHWABE GMBH & CO. KG',
+   0x0050DD: u'SERRA SOLDADURA, S.A.',
+   0x0050DE: u'SIGNUM SYSTEMS CORP.',
+   0x0050DF: u'AirFiber, Inc.',
+   0x0050E1: u'NS TECH ELECTRONICS SDN BHD',
+   0x0050E2: u'CISCO SYSTEMS, INC.',
+   0x0050E3: u'Terayon Communications Systems',
+   0x0050E4: u'APPLE COMPUTER, INC.',
+   0x0050E6: u'HAKUSAN CORPORATION',
+   0x0050E7: u'PARADISE INNOVATIONS (ASIA)',
+   0x0050E8: u'NOMADIX INC.',
+   0x0050EA: u'XEL COMMUNICATIONS, INC.',
+   0x0050EB: u'ALPHA-TOP CORPORATION',
+   0x0050EC: u'OLICOM A/S',
+   0x0050ED: u'ANDA NETWORKS',
+   0x0050EE: u'TEK DIGITEL CORPORATION',
+   0x0050EF: u'SPE Systemhaus GmbH',
+   0x0050F0: u'CISCO SYSTEMS, INC.',
+   0x0050F1: u'LIBIT SIGNAL PROCESSING, LTD.',
+   0x0050F2: u'MICROSOFT CORP.',
+   0x0050F3: u'GLOBAL NET INFORMATION CO., Ltd.',
+   0x0050F4: u'SIGMATEK GMBH & CO. KG',
+   0x0050F6: u'PAN-INTERNATIONAL INDUSTRIAL CORP.',
+   0x0050F7: u'VENTURE MANUFACTURING (SINGAPORE) LTD.',
+   0x0050F8: u'ENTREGA TECHNOLOGIES, INC.',
+   0x0050F9: u'SENSORMATIC ACD',
+   0x0050FA: u'OXTEL, LTD.',
+   0x0050FB: u'VSK ELECTRONICS',
+   0x0050FC: u'EDIMAX TECHNOLOGY CO., LTD.',
+   0x0050FD: u'VISIONCOMM CO., LTD.',
+   0x0050FE: u'PCTVnet ASA',
+   0x0050FF: u'HAKKO ELECTRONICS CO., LTD.',
+   0x006000: u'XYCOM INC.',
+   0x006001: u'InnoSys, Inc.',
+   0x006002: u'SCREEN SUBTITLING SYSTEMS, LTD',
+   0x006003: u'TERAOKA WEIGH SYSTEM PTE, LTD.',
+   0x006004: u'COMPUTADORES MODULARES SA',
+   0x006005: u'FEEDBACK DATA LTD.',
+   0x006006: u'SOTEC CO., LTD',
+   0x006007: u'ACRES GAMING, INC.',
+   0x006008: u'3COM CORPORATION',
+   0x006009: u'CISCO SYSTEMS, INC.',
+   0x00600A: u'SORD COMPUTER CORPORATION',
+   0x00600B: u'LOGWARE GmbH',
+   0x00600C: u'APPLIED DATA SYSTEMS, INC.',
+   0x00600D: u'Digital Logic GmbH',
+   0x00600E: u'WAVENET INTERNATIONAL, INC.',
+   0x00600F: u'WESTELL, INC.',
+   0x006010: u'NETWORK MACHINES, INC.',
+   0x006011: u'CRYSTAL SEMICONDUCTOR CORP.',
+   0x006012: u'POWER COMPUTING CORPORATION',
+   0x006013: u'NETSTAL MASCHINEN AG',
+   0x006014: u'EDEC CO., LTD.',
+   0x006015: u'NET2NET CORPORATION',
+   0x006016: u'CLARIION',
+   0x006017: u'TOKIMEC INC.',
+   0x006018: u'STELLAR ONE CORPORATION',
+   0x006019: u'Roche Diagnostics',
+   0x00601A: u'KEITHLEY INSTRUMENTS',
+   0x00601B: u'MESA ELECTRONICS',
+   0x00601C: u'TELXON CORPORATION',
+   0x00601D: u'LUCENT TECHNOLOGIES',
+   0x00601E: u'SOFTLAB, INC.',
+   0x00601F: u'STALLION TECHNOLOGIES',
+   0x006020: u'PIVOTAL NETWORKING, INC.',
+   0x006021: u'DSC CORPORATION',
+   0x006022: u'VICOM SYSTEMS, INC.',
+   0x006023: u'PERICOM SEMICONDUCTOR CORP.',
+   0x006024: u'GRADIENT TECHNOLOGIES, INC.',
+   0x006025: u'ACTIVE IMAGING PLC',
+   0x006026: u'VIKING COMPONENTS, INC.',
+   0x006027: u'Superior Modular Products',
+   0x006028: u'MACROVISION CORPORATION',
+   0x006029: u'CARY PERIPHERALS INC.',
+   0x00602A: u'SYMICRON COMPUTER COMMUNICATIONS, LTD.',
+   0x00602B: u'PEAK AUDIO',
+   0x00602C: u'LINX Data Terminals, Inc.',
+   0x00602D: u'ALERTON TECHNOLOGIES, INC.',
+   0x00602E: u'CYCLADES CORPORATION',
+   0x00602F: u'CISCO SYSTEMS, INC.',
+   0x006030: u'VILLAGE TRONIC ENTWICKLUNG',
+   0x006031: u'HRK SYSTEMS',
+   0x006032: u'I-CUBE, INC.',
+   0x006033: u'ACUITY IMAGING, INC.',
+   0x006034: u'ROBERT BOSCH GmbH',
+   0x006035: u'DALLAS SEMICONDUCTOR, INC.',
+   0x006036: u'AUSTRIAN RESEARCH CENTER SEIBERSDORF',
+   0x006037: u'NXP Semiconductors',
+   0x006038: u'Nortel Networks',
+   0x006039: u'SanCom Technology, Inc.',
+   0x00603A: u'QUICK CONTROLS LTD.',
+   0x00603B: u'AMTEC spa',
+   0x00603C: u'HAGIWARA SYS-COM CO., LTD.',
+   0x00603D: u'3CX',
+   0x00603E: u'CISCO SYSTEMS, INC.',
+   0x00603F: u'PATAPSCO DESIGNS',
+   0x006040: u'NETRO CORP.',
+   0x006041: u'Yokogawa Electric Corporation',
+   0x006042: u'TKS (USA), INC.',
+   0x006043: u'ComSoft Systems, Inc.',
+   0x006044: u'LITTON/POLY-SCIENTIFIC',
+   0x006045: u'PATHLIGHT TECHNOLOGIES',
+   0x006046: u'VMETRO, INC.',
+   0x006047: u'CISCO SYSTEMS, INC.',
+   0x006048: u'EMC CORPORATION',
+   0x006049: u'VINA TECHNOLOGIES',
+   0x00604A: u'SAIC IDEAS GROUP',
+   0x00604B: u'Safe-com GmbH & Co. KG',
+   0x00604C: u'SAGEM SA',
+   0x00604D: u'MMC NETWORKS, INC.',
+   0x00604E: u'CYCLE COMPUTER CORPORATION, INC.',
+   0x00604F: u'SUZUKI MFG. CO., LTD.',
+   0x006050: u'INTERNIX INC.',
+   0x006051: u'QUALITY SEMICONDUCTOR',
+   0x006052: u'PERIPHERALS ENTERPRISE CO., Ltd.',
+   0x006053: u'TOYODA MACHINE WORKS, LTD.',
+   0x006054: u'CONTROLWARE GMBH',
+   0x006055: u'CORNELL UNIVERSITY',
+   0x006056: u'NETWORK TOOLS, INC.',
+   0x006057: u'MURATA MANUFACTURING CO., LTD.',
+   0x006058: u'COPPER MOUNTAIN COMMUNICATIONS, INC.',
+   0x006059: u'TECHNICAL COMMUNICATIONS CORP.',
+   0x00605A: u'CELCORE, INC.',
+   0x00605B: u'IntraServer Technology, Inc.',
+   0x00605C: u'CISCO SYSTEMS, INC.',
+   0x00605D: u'SCANIVALVE CORP.',
+   0x00605E: u'LIBERTY TECHNOLOGY NETWORKING',
+   0x00605F: u'NIPPON UNISOFT CORPORATION',
+   0x006060: u'DAWNING TECHNOLOGIES, INC.',
+   0x006061: u'WHISTLE COMMUNICATIONS CORP.',
+   0x006062: u'TELESYNC, INC.',
+   0x006063: u'PSION DACOM PLC.',
+   0x006064: u'NETCOMM LIMITED',
+   0x006065: u'BERNECKER & RAINER INDUSTRIE-ELEKTRONIC GmbH',
+   0x006066: u'LACROIX TECHNOLGIE',
+   0x006067: u'ACER NETXUS INC.',
+   0x006068: u'EICON TECHNOLOGY CORPORATION',
+   0x006069: u'BROCADE COMMUNICATIONS SYSTEMS, Inc.',
+   0x00606A: u'MITSUBISHI WIRELESS COMMUNICATIONS. INC.',
+   0x00606B: u'Synclayer Inc.',
+   0x00606C: u'ARESCOM',
+   0x00606D: u'DIGITAL EQUIPMENT CORP.',
+   0x00606E: u'DAVICOM SEMICONDUCTOR, INC.',
+   0x00606F: u'CLARION CORPORATION OF AMERICA',
+   0x006070: u'CISCO SYSTEMS, INC.',
+   0x006071: u'MIDAS LAB, INC.',
+   0x006072: u'VXL INSTRUMENTS, LIMITED',
+   0x006073: u'REDCREEK COMMUNICATIONS, INC.',
+   0x006074: u'QSC AUDIO PRODUCTS',
+   0x006075: u'PENTEK, INC.',
+   0x006076: u'SCHLUMBERGER TECHNOLOGIES RETAIL PETROLEUM SYSTEMS',
+   0x006077: u'PRISA NETWORKS',
+   0x006078: u'POWER MEASUREMENT LTD.',
+   0x006079: u'Mainstream Data, Inc.',
+   0x00607A: u'DVS GmbH',
+   0x00607B: u'FORE SYSTEMS, INC.',
+   0x00607C: u'WaveAccess, Ltd.',
+   0x00607D: u'SENTIENT NETWORKS INC.',
+   0x00607E: u'GIGALABS, INC.',
+   0x00607F: u'AURORA TECHNOLOGIES, INC.',
+   0x006080: u'MICROTRONIX DATACOM LTD.',
+   0x006081: u'TV/COM INTERNATIONAL',
+   0x006082: u'NOVALINK TECHNOLOGIES, INC.',
+   0x006083: u'CISCO SYSTEMS, INC.',
+   0x006084: u'DIGITAL VIDEO',
+   0x006085: u'Storage Concepts',
+   0x006086: u'LOGIC REPLACEMENT TECH. LTD.',
+   0x006087: u'KANSAI ELECTRIC CO., LTD.',
+   0x006088: u'WHITE MOUNTAIN DSP, INC.',
+   0x006089: u'XATA',
+   0x00608A: u'CITADEL COMPUTER',
+   0x00608B: u'ConferTech International',
+   0x00608C: u'3COM CORPORATION',
+   0x00608D: u'UNIPULSE CORP.',
+   0x00608E: u'HE ELECTRONICS, TECHNOLOGIE & SYSTEMTECHNIK GmbH',
+   0x00608F: u'TEKRAM TECHNOLOGY CO., LTD.',
+   0x006090: u'ABLE COMMUNICATIONS, INC.',
+   0x006091: u'FIRST PACIFIC NETWORKS, INC.',
+   0x006092: u'MICRO/SYS, INC.',
+   0x006093: u'VARIAN',
+   0x006094: u'IBM CORP.',
+   0x006095: u'ACCU-TIME SYSTEMS, INC.',
+   0x006096: u'T.S. MICROTECH INC.',
+   0x006097: u'3COM CORPORATION',
+   0x006098: u'HT COMMUNICATIONS',
+   0x006099: u'SBE, Inc.',
+   0x00609A: u'NJK TECHNO CO.',
+   0x00609B: u'ASTRO-MED, INC.',
+   0x00609C: u'Perkin-Elmer Incorporated',
+   0x00609D: u'PMI FOOD EQUIPMENT GROUP',
+   0x00609E: u'ASC X3 - INFORMATION TECHNOLOGY STANDARDS SECRETARIATS',
+   0x00609F: u'PHAST CORPORATION',
+   0x0060A0: u'SWITCHED NETWORK TECHNOLOGIES, INC.',
+   0x0060A1: u'VPNet, Inc.',
+   0x0060A2: u'NIHON UNISYS LIMITED CO.',
+   0x0060A3: u'CONTINUUM TECHNOLOGY CORP.',
+   0x0060A4: u'GRINAKER SYSTEM TECHNOLOGIES',
+   0x0060A5: u'PERFORMANCE TELECOM CORP.',
+   0x0060A6: u'PARTICLE MEASURING SYSTEMS',
+   0x0060A7: u'MICROSENS GmbH & CO. KG',
+   0x0060A8: u'TIDOMAT AB',
+   0x0060A9: u'GESYTEC MbH',
+   0x0060AA: u'INTELLIGENT DEVICES INC. (IDI)',
+   0x0060AB: u'LARSCOM INCORPORATED',
+   0x0060AC: u'RESILIENCE CORPORATION',
+   0x0060AD: u'MegaChips Corporation',
+   0x0060AE: u'TRIO INFORMATION SYSTEMS AB',
+   0x0060AF: u'PACIFIC MICRO DATA, INC.',
+   0x0060B0: u'HEWLETT-PACKARD CO.',
+   0x0060B1: u'INPUT/OUTPUT, INC.',
+   0x0060B2: u'PROCESS CONTROL CORP.',
+   0x0060B3: u'Z-COM, INC.',
+   0x0060B4: u'GLENAYRE R&D INC.',
+   0x0060B5: u'KEBA GmbH',
+   0x0060B6: u'LAND COMPUTER CO., LTD.',
+   0x0060B7: u'CHANNELMATIC, INC.',
+   0x0060B8: u'CORELIS INC.',
+   0x0060B9: u'NITSUKO CORPORATION',
+   0x0060BA: u'SAHARA NETWORKS, INC.',
+   0x0060BB: u'CABLETRON - NETLINK, INC.',
+   0x0060BC: u'KeunYoung Electronics & Communication Co., Ltd.',
+   0x0060BD: u'HUBBELL-PULSECOM',
+   0x0060BE: u'WEBTRONICS',
+   0x0060BF: u'MACRAIGOR SYSTEMS, INC.',
+   0x0060C0: u'NERA AS',
+   0x0060C1: u'WaveSpan Corporation',
+   0x0060C2: u'MPL AG',
+   0x0060C3: u'NETVISION CORPORATION',
+   0x0060C4: u'SOLITON SYSTEMS K.K.',
+   0x0060C5: u'ANCOT CORP.',
+   0x0060C6: u'DCS AG',
+   0x0060C7: u'AMATI COMMUNICATIONS CORP.',
+   0x0060C8: u'KUKA WELDING SYSTEMS & ROBOTS',
+   0x0060C9: u'ControlNet, Inc.',
+   0x0060CA: u'HARMONIC SYSTEMS INCORPORATED',
+   0x0060CB: u'HITACHI ZOSEN CORPORATION',
+   0x0060CC: u'EMTRAK, INCORPORATED',
+   0x0060CD: u'VideoServer, Inc.',
+   0x0060CE: u'ACCLAIM COMMUNICATIONS',
+   0x0060CF: u'ALTEON NETWORKS, INC.',
+   0x0060D0: u'SNMP RESEARCH INCORPORATED',
+   0x0060D1: u'CASCADE COMMUNICATIONS',
+   0x0060D2: u'LUCENT TECHNOLOGIES TAIWAN TELECOMMUNICATIONS CO., LTD.',
+   0x0060D3: u'AT&T',
+   0x0060D4: u'ELDAT COMMUNICATION LTD.',
+   0x0060D5: u'MIYACHI TECHNOS CORP.',
+   0x0060D6: u'NovAtel Wireless Technologies Ltd.',
+   0x0060D7: u'ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE (EPFL)',
+   0x0060D8: u'ELMIC SYSTEMS, INC.',
+   0x0060D9: u'TRANSYS NETWORKS INC.',
+   0x0060DA: u'JBM ELECTRONICS CO.',
+   0x0060DB: u'NTP ELEKTRONIK A/S',
+   0x0060DC: u'Toyo Network Systems Co, Ltd.',
+   0x0060DD: u'MYRICOM, INC.',
+   0x0060DE: u'KAYSER-THREDE GmbH',
+   0x0060DF: u'CNT Corporation',
+   0x0060E0: u'AXIOM TECHNOLOGY CO., LTD.',
+   0x0060E1: u'ORCKIT COMMUNICATIONS LTD.',
+   0x0060E2: u'QUEST ENGINEERING & DEVELOPMENT',
+   0x0060E3: u'ARBIN INSTRUMENTS',
+   0x0060E4: u'COMPUSERVE, INC.',
+   0x0060E5: u'FUJI AUTOMATION CO., LTD.',
+   0x0060E6: u'SHOMITI SYSTEMS INCORPORATED',
+   0x0060E7: u'RANDATA',
+   0x0060E8: u'HITACHI COMPUTER PRODUCTS (AMERICA), INC.',
+   0x0060E9: u'ATOP TECHNOLOGIES, INC.',
+   0x0060EA: u'StreamLogic',
+   0x0060EB: u'FOURTHTRACK SYSTEMS',
+   0x0060EC: u'HERMARY OPTO ELECTRONICS INC.',
+   0x0060ED: u'RICARDO TEST AUTOMATION LTD.',
+   0x0060EE: u'APOLLO',
+   0x0060EF: u'FLYTECH TECHNOLOGY CO., LTD.',
+   0x0060F0: u'JOHNSON & JOHNSON MEDICAL, INC',
+   0x0060F1: u'EXP COMPUTER, INC.',
+   0x0060F2: u'LASERGRAPHICS, INC.',
+   0x0060F3: u'Performance Analysis Broadband, Spirent plc',
+   0x0060F4: u'ADVANCED COMPUTER SOLUTIONS, Inc.',
+   0x0060F5: u'ICON WEST, INC.',
+   0x0060F6: u'NEXTEST COMMUNICATIONS PRODUCTS, INC.',
+   0x0060F7: u'DATAFUSION SYSTEMS',
+   0x0060F8: u'Loran International Technologies Inc.',
+   0x0060F9: u'DIAMOND LANE COMMUNICATIONS',
+   0x0060FA: u'EDUCATIONAL TECHNOLOGY RESOURCES, INC.',
+   0x0060FB: u'PACKETEER, INC.',
+   0x0060FC: u'CONSERVATION THROUGH INNOVATION LTD.',
+   0x0060FD: u'NetICs, Inc.',
+   0x0060FE: u'LYNX SYSTEM DEVELOPERS, INC.',
+   0x0060FF: u'QuVis, Inc.',
+   0x0070B0: u'M/A-COM INC. COMPANIES',
+   0x0070B3: u'DATA RECALL LTD.',
+   0x008000: u'MULTITECH SYSTEMS, INC.',
+   0x008001: u'PERIPHONICS CORPORATION',
+   0x008002: u'SATELCOM (UK) LTD',
+   0x008003: u'HYTEC ELECTRONICS LTD.',
+   0x008004: u'ANTLOW COMMUNICATIONS, LTD.',
+   0x008005: u'CACTUS COMPUTER INC.',
+   0x008006: u'COMPUADD CORPORATION',
+   0x008007: u'DLOG NC-SYSTEME',
+   0x008008: u'DYNATECH COMPUTER SYSTEMS',
+   0x008009: u'JUPITER SYSTEMS, INC.',
+   0x00800A: u'JAPAN COMPUTER CORP.',
+   0x00800B: u'CSK CORPORATION',
+   0x00800C: u'VIDECOM LIMITED',
+   0x00800D: u'VOSSWINKEL F.U.',
+   0x00800E: u'ATLANTIX CORPORATION',
+   0x00800F: u'STANDARD MICROSYSTEMS',
+   0x008010: u'COMMODORE INTERNATIONAL',
+   0x008011: u'DIGITAL SYSTEMS INT\'L. INC.',
+   0x008012: u'INTEGRATED MEASUREMENT SYSTEMS',
+   0x008013: u'THOMAS-CONRAD CORPORATION',
+   0x008014: u'ESPRIT SYSTEMS',
+   0x008015: u'SEIKO SYSTEMS, INC.',
+   0x008016: u'WANDEL AND GOLTERMANN',
+   0x008017: u'PFU LIMITED',
+   0x008018: u'KOBE STEEL, LTD.',
+   0x008019: u'DAYNA COMMUNICATIONS, INC.',
+   0x00801A: u'BELL ATLANTIC',
+   0x00801B: u'KODIAK TECHNOLOGY',
+   0x00801C: u'NEWPORT SYSTEMS SOLUTIONS',
+   0x00801D: u'INTEGRATED INFERENCE MACHINES',
+   0x00801E: u'XINETRON, INC.',
+   0x00801F: u'KRUPP ATLAS ELECTRONIK GMBH',
+   0x008020: u'NETWORK PRODUCTS',
+   0x008021: u'Alcatel Canada Inc.',
+   0x008022: u'SCAN-OPTICS',
+   0x008023: u'INTEGRATED BUSINESS NETWORKS',
+   0x008024: u'KALPANA, INC.',
+   0x008025: u'STOLLMANN GMBH',
+   0x008026: u'NETWORK PRODUCTS CORPORATION',
+   0x008027: u'ADAPTIVE SYSTEMS, INC.',
+   0x008028: u'TRADPOST (HK) LTD',
+   0x008029: u'EAGLE TECHNOLOGY, INC.',
+   0x00802A: u'TEST SYSTEMS & SIMULATIONS INC',
+   0x00802B: u'INTEGRATED MARKETING CO',
+   0x00802C: u'THE SAGE GROUP PLC',
+   0x00802D: u'XYLOGICS INC',
+   0x00802E: u'CASTLE ROCK COMPUTING',
+   0x00802F: u'NATIONAL INSTRUMENTS CORP.',
+   0x008030: u'NEXUS ELECTRONICS',
+   0x008031: u'BASYS, CORP.',
+   0x008032: u'ACCESS CO., LTD.',
+   0x008033: u'FORMATION, INC.',
+   0x008034: u'SMT GOUPIL',
+   0x008035: u'TECHNOLOGY WORKS, INC.',
+   0x008036: u'REFLEX MANUFACTURING SYSTEMS',
+   0x008037: u'Ericsson Group',
+   0x008038: u'DATA RESEARCH & APPLICATIONS',
+   0x008039: u'ALCATEL STC AUSTRALIA',
+   0x00803A: u'VARITYPER, INC.',
+   0x00803B: u'APT COMMUNICATIONS, INC.',
+   0x00803C: u'TVS ELECTRONICS LTD',
+   0x00803D: u'SURIGIKEN CO.,  LTD.',
+   0x00803E: u'SYNERNETICS',
+   0x00803F: u'TATUNG COMPANY',
+   0x008040: u'JOHN FLUKE MANUFACTURING CO.',
+   0x008041: u'VEB KOMBINAT ROBOTRON',
+   0x008042: u'FORCE COMPUTERS',
+   0x008043: u'NETWORLD, INC.',
+   0x008044: u'SYSTECH COMPUTER CORP.',
+   0x008045: u'MATSUSHITA ELECTRIC IND. CO',
+   0x008046: u'UNIVERSITY OF TORONTO',
+   0x008047: u'IN-NET CORP.',
+   0x008048: u'COMPEX INCORPORATED',
+   0x008049: u'NISSIN ELECTRIC CO., LTD.',
+   0x00804A: u'PRO-LOG',
+   0x00804B: u'EAGLE TECHNOLOGIES PTY.LTD.',
+   0x00804C: u'CONTEC CO., LTD.',
+   0x00804D: u'CYCLONE MICROSYSTEMS, INC.',
+   0x00804E: u'APEX COMPUTER COMPANY',
+   0x00804F: u'DAIKIN INDUSTRIES, LTD.',
+   0x008050: u'ZIATECH CORPORATION',
+   0x008051: u'FIBERMUX',
+   0x008052: u'TECHNICALLY ELITE CONCEPTS',
+   0x008053: u'INTELLICOM, INC.',
+   0x008054: u'FRONTIER TECHNOLOGIES CORP.',
+   0x008055: u'FERMILAB',
+   0x008056: u'SPHINX ELEKTRONIK GMBH',
+   0x008057: u'ADSOFT, LTD.',
+   0x008058: u'PRINTER SYSTEMS CORPORATION',
+   0x008059: u'STANLEY ELECTRIC CO., LTD',
+   0x00805A: u'TULIP COMPUTERS INTERNAT\'L B.V',
+   0x00805B: u'CONDOR SYSTEMS, INC.',
+   0x00805C: u'AGILIS CORPORATION',
+   0x00805D: u'CANSTAR',
+   0x00805E: u'LSI LOGIC CORPORATION',
+   0x00805F: u'COMPAQ COMPUTER CORPORATION',
+   0x008060: u'NETWORK INTERFACE CORPORATION',
+   0x008061: u'LITTON SYSTEMS, INC.',
+   0x008062: u'INTERFACE  CO.',
+   0x008063: u'RICHARD HIRSCHMANN GMBH & CO.',
+   0x008064: u'WYSE TECHNOLOGY',
+   0x008065: u'CYBERGRAPHIC SYSTEMS PTY LTD.',
+   0x008066: u'ARCOM CONTROL SYSTEMS, LTD.',
+   0x008067: u'SQUARE D COMPANY',
+   0x008068: u'YAMATECH SCIENTIFIC LTD.',
+   0x008069: u'COMPUTONE SYSTEMS',
+   0x00806A: u'ERI (EMPAC RESEARCH INC.)',
+   0x00806B: u'SCHMID TELECOMMUNICATION',
+   0x00806C: u'CEGELEC PROJECTS LTD',
+   0x00806D: u'CENTURY SYSTEMS CORP.',
+   0x00806E: u'NIPPON STEEL CORPORATION',
+   0x00806F: u'ONELAN LTD.',
+   0x008070: u'COMPUTADORAS MICRON',
+   0x008071: u'SAI TECHNOLOGY',
+   0x008072: u'MICROPLEX SYSTEMS LTD.',
+   0x008073: u'DWB ASSOCIATES',
+   0x008074: u'FISHER CONTROLS',
+   0x008075: u'PARSYTEC GMBH',
+   0x008076: u'MCNC',
+   0x008077: u'BROTHER INDUSTRIES, LTD.',
+   0x008078: u'PRACTICAL PERIPHERALS, INC.',
+   0x008079: u'MICROBUS DESIGNS LTD.',
+   0x00807A: u'AITECH SYSTEMS LTD.',
+   0x00807B: u'ARTEL COMMUNICATIONS CORP.',
+   0x00807C: u'FIBERCOM, INC.',
+   0x00807D: u'EQUINOX SYSTEMS INC.',
+   0x00807E: u'SOUTHERN PACIFIC LTD.',
+   0x00807F: u'DY-4 INCORPORATED',
+   0x008080: u'DATAMEDIA CORPORATION',
+   0x008081: u'KENDALL SQUARE RESEARCH CORP.',
+   0x008082: u'PEP MODULAR COMPUTERS GMBH',
+   0x008083: u'AMDAHL',
+   0x008084: u'THE CLOUD INC.',
+   0x008085: u'H-THREE SYSTEMS CORPORATION',
+   0x008086: u'COMPUTER GENERATION INC.',
+   0x008087: u'OKI ELECTRIC INDUSTRY CO., LTD',
+   0x008088: u'VICTOR COMPANY OF JAPAN, LTD.',
+   0x008089: u'TECNETICS (PTY) LTD.',
+   0x00808A: u'SUMMIT MICROSYSTEMS CORP.',
+   0x00808B: u'DACOLL LIMITED',
+   0x00808C: u'NetScout Systems, Inc.',
+   0x00808D: u'WESTCOAST TECHNOLOGY B.V.',
+   0x00808E: u'RADSTONE TECHNOLOGY',
+   0x00808F: u'C. ITOH ELECTRONICS, INC.',
+   0x008090: u'MICROTEK INTERNATIONAL, INC.',
+   0x008091: u'TOKYO ELECTRIC CO.,LTD',
+   0x008092: u'JAPAN COMPUTER INDUSTRY, INC.',
+   0x008093: u'XYRON CORPORATION',
+   0x008094: u'ALFA LAVAL AUTOMATION AB',
+   0x008095: u'BASIC MERTON HANDELSGES.M.B.H.',
+   0x008096: u'HUMAN DESIGNED SYSTEMS, INC.',
+   0x008097: u'CENTRALP AUTOMATISMES',
+   0x008098: u'TDK CORPORATION',
+   0x008099: u'KLOCKNER MOELLER IPC',
+   0x00809A: u'NOVUS NETWORKS LTD',
+   0x00809B: u'JUSTSYSTEM CORPORATION',
+   0x00809C: u'LUXCOM, INC.',
+   0x00809D: u'Commscraft Ltd.',
+   0x00809E: u'DATUS GMBH',
+   0x00809F: u'ALCATEL BUSINESS SYSTEMS',
+   0x0080A0: u'EDISA HEWLETT PACKARD S/A',
+   0x0080A1: u'MICROTEST, INC.',
+   0x0080A2: u'CREATIVE ELECTRONIC SYSTEMS',
+   0x0080A3: u'LANTRONIX',
+   0x0080A4: u'LIBERTY ELECTRONICS',
+   0x0080A5: u'SPEED INTERNATIONAL',
+   0x0080A6: u'REPUBLIC TECHNOLOGY, INC.',
+   0x0080A7: u'MEASUREX CORP.',
+   0x0080A8: u'VITACOM CORPORATION',
+   0x0080A9: u'CLEARPOINT RESEARCH',
+   0x0080AA: u'MAXPEED',
+   0x0080AB: u'DUKANE NETWORK INTEGRATION',
+   0x0080AC: u'IMLOGIX, DIVISION OF GENESYS',
+   0x0080AD: u'CNET TECHNOLOGY, INC.',
+   0x0080AE: u'HUGHES NETWORK SYSTEMS',
+   0x0080AF: u'ALLUMER CO., LTD.',
+   0x0080B0: u'ADVANCED INFORMATION',
+   0x0080B1: u'SOFTCOM A/S',
+   0x0080B2: u'NETWORK EQUIPMENT TECHNOLOGIES',
+   0x0080B3: u'AVAL DATA CORPORATION',
+   0x0080B4: u'SOPHIA SYSTEMS',
+   0x0080B5: u'UNITED NETWORKS INC.',
+   0x0080B6: u'THEMIS COMPUTER',
+   0x0080B7: u'STELLAR COMPUTER',
+   0x0080B8: u'BUG, INCORPORATED',
+   0x0080B9: u'ARCHE TECHNOLIGIES INC.',
+   0x0080BA: u'SPECIALIX (ASIA) PTE, LTD',
+   0x0080BB: u'HUGHES LAN SYSTEMS',
+   0x0080BC: u'HITACHI ENGINEERING CO., LTD',
+   0x0080BD: u'THE FURUKAWA ELECTRIC CO., LTD',
+   0x0080BE: u'ARIES RESEARCH',
+   0x0080BF: u'TAKAOKA ELECTRIC MFG. CO. LTD.',
+   0x0080C0: u'PENRIL DATACOMM',
+   0x0080C1: u'LANEX CORPORATION',
+   0x0080C2: u'IEEE 802.1 COMMITTEE',
+   0x0080C3: u'BICC INFORMATION SYSTEMS & SVC',
+   0x0080C4: u'DOCUMENT TECHNOLOGIES, INC.',
+   0x0080C5: u'NOVELLCO DE MEXICO',
+   0x0080C6: u'NATIONAL DATACOMM CORPORATION',
+   0x0080C7: u'XIRCOM',
+   0x0080C8: u'D-LINK SYSTEMS, INC.',
+   0x0080C9: u'ALBERTA MICROELECTRONIC CENTRE',
+   0x0080CA: u'NETCOM RESEARCH INCORPORATED',
+   0x0080CB: u'FALCO DATA PRODUCTS',
+   0x0080CC: u'MICROWAVE BYPASS SYSTEMS',
+   0x0080CD: u'MICRONICS COMPUTER, INC.',
+   0x0080CE: u'BROADCAST TELEVISION SYSTEMS',
+   0x0080CF: u'EMBEDDED PERFORMANCE INC.',
+   0x0080D0: u'COMPUTER PERIPHERALS, INC.',
+   0x0080D1: u'KIMTRON CORPORATION',
+   0x0080D2: u'SHINNIHONDENKO CO., LTD.',
+   0x0080D3: u'SHIVA CORP.',
+   0x0080D4: u'CHASE RESEARCH LTD.',
+   0x0080D5: u'CADRE TECHNOLOGIES',
+   0x0080D6: u'NUVOTECH, INC.',
+   0x0080D7: u'Fantum Engineering',
+   0x0080D8: u'NETWORK PERIPHERALS INC.',
+   0x0080D9: u'EMK ELEKTRONIK',
+   0x0080DA: u'BRUEL & KJAER',
+   0x0080DB: u'GRAPHON CORPORATION',
+   0x0080DC: u'PICKER INTERNATIONAL',
+   0x0080DD: u'GMX INC/GIMIX',
+   0x0080DE: u'GIPSI S.A.',
+   0x0080DF: u'ADC CODENOLL TECHNOLOGY CORP.',
+   0x0080E0: u'XTP SYSTEMS, INC.',
+   0x0080E1: u'STMICROELECTRONICS',
+   0x0080E2: u'T.D.I. CO., LTD.',
+   0x0080E3: u'CORAL NETWORK CORPORATION',
+   0x0080E4: u'NORTHWEST DIGITAL SYSTEMS, INC',
+   0x0080E5: u'LSI Logic Corporation',
+   0x0080E6: u'PEER NETWORKS, INC.',
+   0x0080E7: u'LYNWOOD SCIENTIFIC DEV. LTD.',
+   0x0080E8: u'CUMULUS CORPORATIION',
+   0x0080E9: u'Madge Ltd.',
+   0x0080EA: u'ADVA Optical Networking Ltd.',
+   0x0080EB: u'COMPCONTROL B.V.',
+   0x0080EC: u'SUPERCOMPUTING SOLUTIONS, INC.',
+   0x0080ED: u'IQ TECHNOLOGIES, INC.',
+   0x0080EE: u'THOMSON CSF',
+   0x0080EF: u'RATIONAL',
+   0x0080F0: u'Panasonic Communications Co., Ltd.',
+   0x0080F1: u'OPUS SYSTEMS',
+   0x0080F2: u'RAYCOM SYSTEMS INC',
+   0x0080F3: u'SUN ELECTRONICS CORP.',
+   0x0080F4: u'TELEMECANIQUE ELECTRIQUE',
+   0x0080F5: u'QUANTEL LTD',
+   0x0080F6: u'SYNERGY MICROSYSTEMS',
+   0x0080F7: u'ZENITH ELECTRONICS',
+   0x0080F8: u'MIZAR, INC.',
+   0x0080F9: u'HEURIKON CORPORATION',
+   0x0080FA: u'RWT GMBH',
+   0x0080FB: u'BVM LIMITED',
+   0x0080FC: u'AVATAR CORPORATION',
+   0x0080FD: u'EXSCEED CORPRATION',
+   0x0080FE: u'AZURE TECHNOLOGIES, INC.',
+   0x0080FF: u'SOC. DE TELEINFORMATIQUE RTC',
+   0x009000: u'DIAMOND MULTIMEDIA',
+   0x009001: u'NISHIMU ELECTRONICS INDUSTRIES CO., LTD.',
+   0x009002: u'ALLGON AB',
+   0x009003: u'APLIO',
+   0x009004: u'3COM EUROPE LTD.',
+   0x009005: u'PROTECH SYSTEMS CO., LTD.',
+   0x009006: u'HAMAMATSU PHOTONICS K.K.',
+   0x009007: u'DOMEX TECHNOLOGY CORP.',
+   0x009008: u'HanA Systems Inc.',
+   0x009009: u'i Controls, Inc.',
+   0x00900A: u'PROTON ELECTRONIC INDUSTRIAL CO., LTD.',
+   0x00900B: u'LANNER ELECTRONICS, INC.',
+   0x00900C: u'CISCO SYSTEMS, INC.',
+   0x00900D: u'Overland Storage Inc.',
+   0x00900E: u'HANDLINK TECHNOLOGIES, INC.',
+   0x00900F: u'KAWASAKI HEAVY INDUSTRIES, LTD',
+   0x009010: u'SIMULATION LABORATORIES, INC.',
+   0x009011: u'WAVTrace, Inc.',
+   0x009012: u'GLOBESPAN SEMICONDUCTOR, INC.',
+   0x009013: u'SAMSAN CORP.',
+   0x009014: u'ROTORK INSTRUMENTS, LTD.',
+   0x009015: u'CENTIGRAM COMMUNICATIONS CORP.',
+   0x009016: u'ZAC',
+   0x009017: u'ZYPCOM, INC.',
+   0x009018: u'ITO ELECTRIC INDUSTRY CO, LTD.',
+   0x009019: u'HERMES ELECTRONICS CO., LTD.',
+   0x00901A: u'UNISPHERE SOLUTIONS',
+   0x00901B: u'DIGITAL CONTROLS',
+   0x00901C: u'mps Software Gmbh',
+   0x00901D: u'PEC (NZ) LTD.',
+   0x00901E: u'SELESTA INGEGNE RIA S.P.A.',
+   0x00901F: u'ADTEC PRODUCTIONS, INC.',
+   0x009020: u'PHILIPS ANALYTICAL X-RAY B.V.',
+   0x009021: u'CISCO SYSTEMS, INC.',
+   0x009022: u'IVEX',
+   0x009023: u'ZILOG INC.',
+   0x009024: u'PIPELINKS, INC.',
+   0x009025: u'VISION SYSTEMS LTD. PTY',
+   0x009026: u'ADVANCED SWITCHING COMMUNICATIONS, INC.',
+   0x009027: u'INTEL CORPORATION',
+   0x009028: u'NIPPON SIGNAL CO., LTD.',
+   0x009029: u'CRYPTO AG',
+   0x00902A: u'COMMUNICATION DEVICES, INC.',
+   0x00902B: u'CISCO SYSTEMS, INC.',
+   0x00902C: u'DATA & CONTROL EQUIPMENT LTD.',
+   0x00902D: u'DATA ELECTRONICS (AUST.) PTY, LTD.',
+   0x00902E: u'NAMCO LIMITED',
+   0x00902F: u'NETCORE SYSTEMS, INC.',
+   0x009030: u'HONEYWELL-DATING',
+   0x009031: u'MYSTICOM, LTD.',
+   0x009032: u'PELCOMBE GROUP LTD.',
+   0x009033: u'INNOVAPHONE AG',
+   0x009034: u'IMAGIC, INC.',
+   0x009035: u'ALPHA TELECOM, INC.',
+   0x009036: u'ens, inc.',
+   0x009037: u'ACUCOMM, INC.',
+   0x009038: u'FOUNTAIN TECHNOLOGIES, INC.',
+   0x009039: u'SHASTA NETWORKS',
+   0x00903A: u'NIHON MEDIA TOOL INC.',
+   0x00903B: u'TriEMS Research Lab, Inc.',
+   0x00903C: u'ATLANTIC NETWORK SYSTEMS',
+   0x00903D: u'BIOPAC SYSTEMS, INC.',
+   0x00903E: u'N.V. PHILIPS INDUSTRIAL ACTIVITIES',
+   0x00903F: u'AZTEC RADIOMEDIA',
+   0x009040: u'Siemens Network Convergence LLC',
+   0x009041: u'APPLIED DIGITAL ACCESS',
+   0x009042: u'ECCS, Inc.',
+   0x009043: u'NICHIBEI DENSHI CO., LTD.',
+   0x009044: u'ASSURED DIGITAL, INC.',
+   0x009045: u'Marconi Communications',
+   0x009046: u'DEXDYNE, LTD.',
+   0x009047: u'GIGA FAST E. LTD.',
+   0x009048: u'ZEAL CORPORATION',
+   0x009049: u'ENTRIDIA CORPORATION',
+   0x00904A: u'CONCUR SYSTEM TECHNOLOGIES',
+   0x00904B: u'GemTek Technology Co., Ltd.',
+   0x00904C: u'EPIGRAM, INC.',
+   0x00904D: u'SPEC S.A.',
+   0x00904E: u'DELEM BV',
+   0x00904F: u'ABB POWER T&D COMPANY, INC.',
+   0x009050: u'TELESTE OY',
+   0x009051: u'ULTIMATE TECHNOLOGY CORP.',
+   0x009052: u'SELCOM ELETTRONICA S.R.L.',
+   0x009053: u'DAEWOO ELECTRONICS CO., LTD.',
+   0x009054: u'INNOVATIVE SEMICONDUCTORS, INC',
+   0x009055: u'PARKER HANNIFIN CORPORATION COMPUMOTOR DIVISION',
+   0x009056: u'TELESTREAM, INC.',
+   0x009057: u'AANetcom, Inc.',
+   0x009058: u'Ultra Electronics Ltd., Command and Control Systems',
+   0x009059: u'TELECOM DEVICE K.K.',
+   0x00905A: u'DEARBORN GROUP, INC.',
+   0x00905B: u'RAYMOND AND LAE ENGINEERING',
+   0x00905C: u'EDMI',
+   0x00905D: u'NETCOM SICHERHEITSTECHNIK GmbH',
+   0x00905E: u'RAULAND-BORG CORPORATION',
+   0x00905F: u'CISCO SYSTEMS, INC.',
+   0x009060: u'SYSTEM CREATE CORP.',
+   0x009061: u'PACIFIC RESEARCH & ENGINEERING CORPORATION',
+   0x009062: u'ICP VORTEX COMPUTERSYSTEME GmbH',
+   0x009063: u'COHERENT COMMUNICATIONS SYSTEMS CORPORATION',
+   0x009064: u'THOMSON BROADCAST SYSTEMS',
+   0x009065: u'FINISAR CORPORATION',
+   0x009066: u'Troika Networks, Inc.',
+   0x009067: u'WalkAbout Computers, Inc.',
+   0x009068: u'DVT CORP.',
+   0x009069: u'JUNIPER NETWORKS, INC.',
+   0x00906A: u'TURNSTONE SYSTEMS, INC.',
+   0x00906B: u'APPLIED RESOURCES, INC.',
+   0x00906C: u'Sartorius Hamburg GmbH',
+   0x00906D: u'CISCO SYSTEMS, INC.',
+   0x00906E: u'PRAXON, INC.',
+   0x00906F: u'CISCO SYSTEMS, INC.',
+   0x009070: u'NEO NETWORKS, INC.',
+   0x009071: u'Applied Innovation Inc.',
+   0x009072: u'SIMRAD AS',
+   0x009073: u'GAIO TECHNOLOGY',
+   0x009074: u'ARGON NETWORKS, INC.',
+   0x009075: u'NEC DO BRASIL S.A.',
+   0x009076: u'FMT AIRCRAFT GATE SUPPORT SYSTEMS AB',
+   0x009077: u'ADVANCED FIBRE COMMUNICATIONS',
+   0x009078: u'MER TELEMANAGEMENT SOLUTIONS, LTD.',
+   0x009079: u'ClearOne, Inc.',
+   0x00907A: u'SPECTRALINK CORP.',
+   0x00907B: u'E-TECH, INC.',
+   0x00907C: u'DIGITALCAST, INC.',
+   0x00907D: u'Lake Communications',
+   0x00907E: u'VETRONIX CORP.',
+   0x00907F: u'WatchGuard Technologies, Inc.',
+   0x009080: u'NOT LIMITED, INC.',
+   0x009081: u'ALOHA NETWORKS, INC.',
+   0x009082: u'FORCE INSTITUTE',
+   0x009083: u'TURBO COMMUNICATION, INC.',
+   0x009084: u'ATECH SYSTEM',
+   0x009085: u'GOLDEN ENTERPRISES, INC.',
+   0x009086: u'CISCO SYSTEMS, INC.',
+   0x009087: u'ITIS',
+   0x009088: u'BAXALL SECURITY LTD.',
+   0x009089: u'SOFTCOM MICROSYSTEMS, INC.',
+   0x00908A: u'BAYLY COMMUNICATIONS, INC.',
+   0x00908B: u'PFU Systems, Inc.',
+   0x00908C: u'ETREND ELECTRONICS, INC.',
+   0x00908D: u'VICKERS ELECTRONICS SYSTEMS',
+   0x00908E: u'Nortel Networks Broadband Access',
+   0x00908F: u'AUDIO CODES LTD.',
+   0x009090: u'I-BUS',
+   0x009091: u'DigitalScape, Inc.',
+   0x009092: u'CISCO SYSTEMS, INC.',
+   0x009093: u'NANAO CORPORATION',
+   0x009094: u'OSPREY TECHNOLOGIES, INC.',
+   0x009095: u'UNIVERSAL AVIONICS',
+   0x009096: u'ASKEY COMPUTER CORP.',
+   0x009097: u'SYCAMORE NETWORKS',
+   0x009098: u'SBC DESIGNS, INC.',
+   0x009099: u'ALLIED TELESIS, K.K.',
+   0x00909A: u'ONE WORLD SYSTEMS, INC.',
+   0x00909B: u'MARKPOINT AB',
+   0x00909C: u'Terayon Communications Systems',
+   0x00909D: u'NovaTech Process Solutions, LLC',
+   0x00909E: u'Critical IO, LLC',
+   0x00909F: u'DIGI-DATA CORPORATION',
+   0x0090A0: u'8X8 INC.',
+   0x0090A1: u'FLYING PIG SYSTEMS, LTD.',
+   0x0090A2: u'CYBERTAN TECHNOLOGY, INC.',
+   0x0090A3: u'Corecess Inc.',
+   0x0090A4: u'ALTIGA NETWORKS',
+   0x0090A5: u'SPECTRA LOGIC',
+   0x0090A6: u'CISCO SYSTEMS, INC.',
+   0x0090A7: u'CLIENTEC CORPORATION',
+   0x0090A8: u'NineTiles Networks, Ltd.',
+   0x0090A9: u'WESTERN DIGITAL',
+   0x0090AA: u'INDIGO ACTIVE VISION SYSTEMS LIMITED',
+   0x0090AB: u'CISCO SYSTEMS, INC.',
+   0x0090AC: u'OPTIVISION, INC.',
+   0x0090AD: u'ASPECT ELECTRONICS, INC.',
+   0x0090AE: u'ITALTEL S.p.A.',
+   0x0090AF: u'J. MORITA MFG. CORP.',
+   0x0090B0: u'VADEM',
+   0x0090B1: u'CISCO SYSTEMS, INC.',
+   0x0090B2: u'AVICI SYSTEMS INC.',
+   0x0090B3: u'AGRANAT SYSTEMS',
+   0x0090B4: u'WILLOWBROOK TECHNOLOGIES',
+   0x0090B5: u'NIKON CORPORATION',
+   0x0090B6: u'FIBEX SYSTEMS',
+   0x0090B7: u'DIGITAL LIGHTWAVE, INC.',
+   0x0090B8: u'ROHDE & SCHWARZ GMBH & CO. KG',
+   0x0090B9: u'BERAN INSTRUMENTS LTD.',
+   0x0090BA: u'VALID NETWORKS, INC.',
+   0x0090BB: u'TAINET COMMUNICATION SYSTEM Corp.',
+   0x0090BC: u'TELEMANN CO., LTD.',
+   0x0090BD: u'OMNIA COMMUNICATIONS, INC.',
+   0x0090BE: u'IBC/INTEGRATED BUSINESS COMPUTERS',
+   0x0090BF: u'CISCO SYSTEMS, INC.',
+   0x0090C0: u'K.J. LAW ENGINEERS, INC.',
+   0x0090C1: u'Peco II, Inc.',
+   0x0090C2: u'JK microsystems, Inc.',
+   0x0090C3: u'TOPIC SEMICONDUCTOR CORP.',
+   0x0090C4: u'JAVELIN SYSTEMS, INC.',
+   0x0090C5: u'INTERNET MAGIC, INC.',
+   0x0090C6: u'OPTIM SYSTEMS, INC.',
+   0x0090C7: u'ICOM INC.',
+   0x0090C8: u'WAVERIDER COMMUNICATIONS (CANADA) INC.',
+   0x0090C9: u'DPAC Technologies',
+   0x0090CA: u'ACCORD VIDEO TELECOMMUNICATIONS, LTD.',
+   0x0090CB: u'Wireless OnLine, Inc.',
+   0x0090CC: u'PLANET COMMUNICATIONS, INC.',
+   0x0090CD: u'ENT-EMPRESA NACIONAL DE TELECOMMUNICACOES, S.A.',
+   0x0090CE: u'TETRA GmbH',
+   0x0090CF: u'NORTEL',
+   0x0090D0: u'Thomson Telecom Belgium',
+   0x0090D1: u'LEICHU ENTERPRISE CO., LTD.',
+   0x0090D2: u'ARTEL VIDEO SYSTEMS',
+   0x0090D3: u'GIESECKE & DEVRIENT GmbH',
+   0x0090D4: u'BindView Development Corp.',
+   0x0090D5: u'EUPHONIX, INC.',
+   0x0090D6: u'CRYSTAL GROUP',
+   0x0090D7: u'NetBoost Corp.',
+   0x0090D8: u'WHITECROSS SYSTEMS',
+   0x0090D9: u'CISCO SYSTEMS, INC.',
+   0x0090DA: u'DYNARC, INC.',
+   0x0090DB: u'NEXT LEVEL COMMUNICATIONS',
+   0x0090DC: u'TECO INFORMATION SYSTEMS',
+   0x0090DD: u'THE MIHARU COMMUNICATIONS CO., LTD.',
+   0x0090DE: u'CARDKEY SYSTEMS, INC.',
+   0x0090DF: u'MITSUBISHI CHEMICAL AMERICA, INC.',
+   0x0090E0: u'SYSTRAN CORP.',
+   0x0090E1: u'TELENA S.P.A.',
+   0x0090E2: u'DISTRIBUTED PROCESSING TECHNOLOGY',
+   0x0090E3: u'AVEX ELECTRONICS INC.',
+   0x0090E4: u'NEC AMERICA, INC.',
+   0x0090E5: u'TEKNEMA, INC.',
+   0x0090E6: u'ACER LABORATORIES, INC.',
+   0x0090E7: u'HORSCH ELEKTRONIK AG',
+   0x0090E8: u'MOXA TECHNOLOGIES CORP., LTD.',
+   0x0090E9: u'JANZ COMPUTER AG',
+   0x0090EA: u'ALPHA TECHNOLOGIES, INC.',
+   0x0090EB: u'SENTRY TELECOM SYSTEMS',
+   0x0090EC: u'PYRESCOM',
+   0x0090ED: u'CENTRAL SYSTEM RESEARCH CO., LTD.',
+   0x0090EE: u'PERSONAL COMMUNICATIONS TECHNOLOGIES',
+   0x0090EF: u'INTEGRIX, INC.',
+   0x0090F0: u'Harmonic Video Systems Ltd.',
+   0x0090F1: u'DOT HILL SYSTEMS CORPORATION',
+   0x0090F2: u'CISCO SYSTEMS, INC.',
+   0x0090F3: u'ASPECT COMMUNICATIONS',
+   0x0090F4: u'LIGHTNING INSTRUMENTATION',
+   0x0090F5: u'CLEVO CO.',
+   0x0090F6: u'ESCALATE NETWORKS, INC.',
+   0x0090F7: u'NBASE COMMUNICATIONS LTD.',
+   0x0090F8: u'MEDIATRIX TELECOM',
+   0x0090F9: u'LEITCH',
+   0x0090FA: u'EMULEX Corp',
+   0x0090FB: u'PORTWELL, INC.',
+   0x0090FC: u'NETWORK COMPUTING DEVICES',
+   0x0090FD: u'CopperCom, Inc.',
+   0x0090FE: u'ELECOM CO., LTD.  (LANEED DIV.)',
+   0x0090FF: u'TELLUS TECHNOLOGY INC.',
+   0x0091D6: u'Crystal Group, Inc.',
+   0x009D8E: u'CARDIAC RECORDERS, INC.',
+   0x00A000: u'CENTILLION NETWORKS, INC.',
+   0x00A001: u'DRS Signal Solutions',
+   0x00A002: u'LEEDS & NORTHRUP AUSTRALIA PTY LTD',
+   0x00A003: u'STAEFA CONTROL SYSTEM',
+   0x00A004: u'NETPOWER, INC.',
+   0x00A005: u'DANIEL INSTRUMENTS, LTD.',
+   0x00A006: u'IMAGE DATA PROCESSING SYSTEM GROUP',
+   0x00A007: u'APEXX TECHNOLOGY, INC.',
+   0x00A008: u'NETCORP',
+   0x00A009: u'WHITETREE NETWORK',
+   0x00A00A: u'Airspan',
+   0x00A00B: u'COMPUTEX CO., LTD.',
+   0x00A00C: u'KINGMAX TECHNOLOGY, INC.',
+   0x00A00D: u'THE PANDA PROJECT',
+   0x00A00E: u'VISUAL NETWORKS, INC.',
+   0x00A00F: u'Broadband Technologies',
+   0x00A010: u'SYSLOGIC DATENTECHNIK AG',
+   0x00A011: u'MUTOH INDUSTRIES LTD.',
+   0x00A012: u'B.A.T.M. ADVANCED TECHNOLOGIES',
+   0x00A013: u'TELTREND LTD.',
+   0x00A014: u'CSIR',
+   0x00A015: u'WYLE',
+   0x00A016: u'MICROPOLIS CORP.',
+   0x00A017: u'J B M CORPORATION',
+   0x00A018: u'CREATIVE CONTROLLERS, INC.',
+   0x00A019: u'NEBULA CONSULTANTS, INC.',
+   0x00A01A: u'BINAR ELEKTRONIK AB',
+   0x00A01B: u'PREMISYS COMMUNICATIONS, INC.',
+   0x00A01C: u'NASCENT NETWORKS CORPORATION',
+   0x00A01D: u'SIXNET',
+   0x00A01E: u'EST CORPORATION',
+   0x00A01F: u'TRICORD SYSTEMS, INC.',
+   0x00A020: u'CITICORP/TTI',
+   0x00A021: u'General Dynamics',
+   0x00A022: u'CENTRE FOR DEVELOPMENT OF ADVANCED COMPUTING',
+   0x00A023: u'APPLIED CREATIVE TECHNOLOGY, INC.',
+   0x00A024: u'3COM CORPORATION',
+   0x00A025: u'REDCOM LABS INC.',
+   0x00A026: u'TELDAT, S.A.',
+   0x00A027: u'FIREPOWER SYSTEMS, INC.',
+   0x00A028: u'CONNER PERIPHERALS',
+   0x00A029: u'COULTER CORPORATION',
+   0x00A02A: u'TRANCELL SYSTEMS',
+   0x00A02B: u'TRANSITIONS RESEARCH CORP.',
+   0x00A02C: u'interWAVE Communications',
+   0x00A02D: u'1394 Trade Association',
+   0x00A02E: u'BRAND COMMUNICATIONS, LTD.',
+   0x00A02F: u'PIRELLI CAVI',
+   0x00A030: u'CAPTOR NV/SA',
+   0x00A031: u'HAZELTINE CORPORATION, MS 1-17',
+   0x00A032: u'GES SINGAPORE PTE. LTD.',
+   0x00A033: u'imc MeBsysteme GmbH',
+   0x00A034: u'AXEL',
+   0x00A035: u'CYLINK CORPORATION',
+   0x00A036: u'APPLIED NETWORK TECHNOLOGY',
+   0x00A037: u'DATASCOPE CORPORATION',
+   0x00A038: u'EMAIL ELECTRONICS',
+   0x00A039: u'ROSS TECHNOLOGY, INC.',
+   0x00A03A: u'KUBOTEK CORPORATION',
+   0x00A03B: u'TOSHIN ELECTRIC CO., LTD.',
+   0x00A03C: u'EG&G NUCLEAR INSTRUMENTS',
+   0x00A03D: u'OPTO-22',
+   0x00A03E: u'ATM FORUM',
+   0x00A03F: u'COMPUTER SOCIETY MICROPROCESSOR & MICROPROCESSOR STANDARDS C',
+   0x00A040: u'APPLE COMPUTER',
+   0x00A041: u'INFICON',
+   0x00A042: u'SPUR PRODUCTS CORP.',
+   0x00A043: u'AMERICAN TECHNOLOGY LABS, INC.',
+   0x00A044: u'NTT IT CO., LTD.',
+   0x00A045: u'PHOENIX CONTACT GMBH & CO.',
+   0x00A046: u'SCITEX CORP. LTD.',
+   0x00A047: u'INTEGRATED FITNESS CORP.',
+   0x00A048: u'QUESTECH, LTD.',
+   0x00A049: u'DIGITECH INDUSTRIES, INC.',
+   0x00A04A: u'NISSHIN ELECTRIC CO., LTD.',
+   0x00A04B: u'TFL LAN INC.',
+   0x00A04C: u'INNOVATIVE SYSTEMS & TECHNOLOGIES, INC.',
+   0x00A04D: u'EDA INSTRUMENTS, INC.',
+   0x00A04E: u'VOELKER TECHNOLOGIES, INC.',
+   0x00A04F: u'AMERITEC CORP.',
+   0x00A050: u'CYPRESS SEMICONDUCTOR',
+   0x00A051: u'ANGIA COMMUNICATIONS. INC.',
+   0x00A052: u'STANILITE ELECTRONICS PTY. LTD',
+   0x00A053: u'COMPACT DEVICES, INC.',
+   0x00A054: u'PRIVATE',
+   0x00A055: u'Data Device Corporation',
+   0x00A056: u'MICROPROSS',
+   0x00A057: u'LANCOM Systems GmbH',
+   0x00A058: u'GLORY, LTD.',
+   0x00A059: u'HAMILTON HALLMARK',
+   0x00A05A: u'KOFAX IMAGE PRODUCTS',
+   0x00A05B: u'MARQUIP, INC.',
+   0x00A05C: u'INVENTORY CONVERSION, INC./',
+   0x00A05D: u'CS COMPUTER SYSTEME GmbH',
+   0x00A05E: u'MYRIAD LOGIC INC.',
+   0x00A05F: u'BTG ENGINEERING BV',
+   0x00A060: u'ACER PERIPHERALS, INC.',
+   0x00A061: u'PURITAN BENNETT',
+   0x00A062: u'AES PRODATA',
+   0x00A063: u'JRL SYSTEMS, INC.',
+   0x00A064: u'KVB/ANALECT',
+   0x00A065: u'Symantec Corporation',
+   0x00A066: u'ISA CO., LTD.',
+   0x00A067: u'NETWORK SERVICES GROUP',
+   0x00A068: u'BHP LIMITED',
+   0x00A069: u'Symmetricom, Inc.',
+   0x00A06A: u'Verilink Corporation',
+   0x00A06B: u'DMS DORSCH MIKROSYSTEM GMBH',
+   0x00A06C: u'SHINDENGEN ELECTRIC MFG. CO., LTD.',
+   0x00A06D: u'MANNESMANN TALLY CORPORATION',
+   0x00A06E: u'AUSTRON, INC.',
+   0x00A06F: u'THE APPCON GROUP, INC.',
+   0x00A070: u'COASTCOM',
+   0x00A071: u'VIDEO LOTTERY TECHNOLOGIES,INC',
+   0x00A072: u'OVATION SYSTEMS LTD.',
+   0x00A073: u'COM21, INC.',
+   0x00A074: u'PERCEPTION TECHNOLOGY',
+   0x00A075: u'MICRON TECHNOLOGY, INC.',
+   0x00A076: u'CARDWARE LAB, INC.',
+   0x00A077: u'FUJITSU NEXION, INC.',
+   0x00A078: u'Marconi Communications',
+   0x00A079: u'ALPS ELECTRIC (USA), INC.',
+   0x00A07A: u'ADVANCED PERIPHERALS TECHNOLOGIES, INC.',
+   0x00A07B: u'DAWN COMPUTER INCORPORATION',
+   0x00A07C: u'TONYANG NYLON CO., LTD.',
+   0x00A07D: u'SEEQ TECHNOLOGY, INC.',
+   0x00A07E: u'AVID TECHNOLOGY, INC.',
+   0x00A07F: u'GSM-SYNTEL, LTD.',
+   0x00A080: u'SBE, Inc.',
+   0x00A081: u'ALCATEL DATA NETWORKS',
+   0x00A082: u'NKT ELEKTRONIK A/S',
+   0x00A083: u'ASIMMPHONY TURKEY',
+   0x00A084: u'DATAPLEX PTY. LTD.',
+   0x00A085: u'PRIVATE',
+   0x00A086: u'AMBER WAVE SYSTEMS, INC.',
+   0x00A087: u'Zarlink Semiconductor Ltd.',
+   0x00A088: u'ESSENTIAL COMMUNICATIONS',
+   0x00A089: u'XPOINT TECHNOLOGIES, INC.',
+   0x00A08A: u'BROOKTROUT TECHNOLOGY, INC.',
+   0x00A08B: u'ASTON ELECTRONIC DESIGNS LTD.',
+   0x00A08C: u'MultiMedia LANs, Inc.',
+   0x00A08D: u'JACOMO CORPORATION',
+   0x00A08E: u'Nokia Internet Communications',
+   0x00A08F: u'DESKNET SYSTEMS, INC.',
+   0x00A090: u'TimeStep Corporation',
+   0x00A091: u'APPLICOM INTERNATIONAL',
+   0x00A092: u'H. BOLLMANN MANUFACTURERS, LTD',
+   0x00A093: u'B/E AEROSPACE, Inc.',
+   0x00A094: u'COMSAT CORPORATION',
+   0x00A095: u'ACACIA NETWORKS, INC.',
+   0x00A096: u'MITUMI ELECTRIC CO., LTD.',
+   0x00A097: u'JC INFORMATION SYSTEMS',
+   0x00A098: u'NETWORK APPLIANCE CORP.',
+   0x00A099: u'K-NET LTD.',
+   0x00A09A: u'NIHON KOHDEN AMERICA',
+   0x00A09B: u'QPSX COMMUNICATIONS, LTD.',
+   0x00A09C: u'Xyplex, Inc.',
+   0x00A09D: u'JOHNATHON FREEMAN TECHNOLOGIES',
+   0x00A09E: u'ICTV',
+   0x00A09F: u'COMMVISION CORP.',
+   0x00A0A0: u'COMPACT DATA, LTD.',
+   0x00A0A1: u'EPIC DATA INC.',
+   0x00A0A2: u'DIGICOM S.P.A.',
+   0x00A0A3: u'RELIABLE POWER METERS',
+   0x00A0A4: u'MICROS SYSTEMS, INC.',
+   0x00A0A5: u'TEKNOR MICROSYSTEME, INC.',
+   0x00A0A6: u'M.I. SYSTEMS, K.K.',
+   0x00A0A7: u'VORAX CORPORATION',
+   0x00A0A8: u'RENEX CORPORATION',
+   0x00A0A9: u'NAVTEL COMMUNICATIONS INC.',
+   0x00A0AA: u'SPACELABS MEDICAL',
+   0x00A0AB: u'NETCS INFORMATIONSTECHNIK GMBH',
+   0x00A0AC: u'GILAT SATELLITE NETWORKS, LTD.',
+   0x00A0AD: u'MARCONI SPA',
+   0x00A0AE: u'NUCOM SYSTEMS, INC.',
+   0x00A0AF: u'WMS INDUSTRIES',
+   0x00A0B0: u'I-O DATA DEVICE, INC.',
+   0x00A0B1: u'FIRST VIRTUAL CORPORATION',
+   0x00A0B2: u'SHIMA SEIKI',
+   0x00A0B3: u'ZYKRONIX',
+   0x00A0B4: u'TEXAS MICROSYSTEMS, INC.',
+   0x00A0B5: u'3H TECHNOLOGY',
+   0x00A0B6: u'SANRITZ AUTOMATION CO., LTD.',
+   0x00A0B7: u'CORDANT, INC.',
+   0x00A0B8: u'SYMBIOS LOGIC INC.',
+   0x00A0B9: u'EAGLE TECHNOLOGY, INC.',
+   0x00A0BA: u'PATTON ELECTRONICS CO.',
+   0x00A0BB: u'HILAN GMBH',
+   0x00A0BC: u'VIASAT, INCORPORATED',
+   0x00A0BD: u'I-TECH CORP.',
+   0x00A0BE: u'INTEGRATED CIRCUIT SYSTEMS, INC. COMMUNICATIONS GROUP',
+   0x00A0BF: u'WIRELESS DATA GROUP MOTOROLA',
+   0x00A0C0: u'DIGITAL LINK CORP.',
+   0x00A0C1: u'ORTIVUS MEDICAL AB',
+   0x00A0C2: u'R.A. SYSTEMS CO., LTD.',
+   0x00A0C3: u'UNICOMPUTER GMBH',
+   0x00A0C4: u'CRISTIE ELECTRONICS LTD.',
+   0x00A0C5: u'ZYXEL COMMUNICATION',
+   0x00A0C6: u'QUALCOMM INCORPORATED',
+   0x00A0C7: u'TADIRAN TELECOMMUNICATIONS',
+   0x00A0C8: u'ADTRAN INC.',
+   0x00A0C9: u'INTEL CORPORATION - HF1-06',
+   0x00A0CA: u'FUJITSU DENSO LTD.',
+   0x00A0CB: u'ARK TELECOMMUNICATIONS, INC.',
+   0x00A0CC: u'LITE-ON COMMUNICATIONS, INC.',
+   0x00A0CD: u'DR. JOHANNES HEIDENHAIN GmbH',
+   0x00A0CE: u'ASTROCOM CORPORATION',
+   0x00A0CF: u'SOTAS, INC.',
+   0x00A0D0: u'TEN X TECHNOLOGY, INC.',
+   0x00A0D1: u'INVENTEC CORPORATION',
+   0x00A0D2: u'ALLIED TELESIS INTERNATIONAL CORPORATION',
+   0x00A0D3: u'INSTEM COMPUTER SYSTEMS, LTD.',
+   0x00A0D4: u'RADIOLAN,  INC.',
+   0x00A0D5: u'SIERRA WIRELESS INC.',
+   0x00A0D6: u'SBE, INC.',
+   0x00A0D7: u'KASTEN CHASE APPLIED RESEARCH',
+   0x00A0D8: u'SPECTRA - TEK',
+   0x00A0D9: u'CONVEX COMPUTER CORPORATION',
+   0x00A0DA: u'INTEGRATED SYSTEMS Technology, Inc.',
+   0x00A0DB: u'FISHER & PAYKEL PRODUCTION',
+   0x00A0DC: u'O.N. ELECTRONIC CO., LTD.',
+   0x00A0DD: u'AZONIX CORPORATION',
+   0x00A0DE: u'YAMAHA CORPORATION',
+   0x00A0DF: u'STS TECHNOLOGIES, INC.',
+   0x00A0E0: u'TENNYSON TECHNOLOGIES PTY LTD',
+   0x00A0E1: u'WESTPORT RESEARCH ASSOCIATES, INC.',
+   0x00A0E2: u'KEISOKU GIKEN CORP.',
+   0x00A0E3: u'XKL SYSTEMS CORP.',
+   0x00A0E4: u'OPTIQUEST',
+   0x00A0E5: u'NHC COMMUNICATIONS',
+   0x00A0E6: u'DIALOGIC CORPORATION',
+   0x00A0E7: u'CENTRAL DATA CORPORATION',
+   0x00A0E8: u'REUTERS HOLDINGS PLC',
+   0x00A0E9: u'ELECTRONIC RETAILING SYSTEMS INTERNATIONAL',
+   0x00A0EA: u'ETHERCOM CORP.',
+   0x00A0EB: u'Encore Networks',
+   0x00A0EC: u'TRANSMITTON LTD.',
+   0x00A0ED: u'Brooks Automation, Inc.',
+   0x00A0EE: u'NASHOBA NETWORKS',
+   0x00A0EF: u'LUCIDATA LTD.',
+   0x00A0F0: u'TORONTO MICROELECTRONICS INC.',
+   0x00A0F1: u'MTI',
+   0x00A0F2: u'INFOTEK COMMUNICATIONS, INC.',
+   0x00A0F3: u'STAUBLI',
+   0x00A0F4: u'GE',
+   0x00A0F5: u'RADGUARD LTD.',
+   0x00A0F6: u'AutoGas Systems Inc.',
+   0x00A0F7: u'V.I COMPUTER CORP.',
+   0x00A0F8: u'SYMBOL TECHNOLOGIES, INC.',
+   0x00A0F9: u'BINTEC COMMUNICATIONS GMBH',
+   0x00A0FA: u'Marconi Communication GmbH',
+   0x00A0FB: u'TORAY ENGINEERING CO., LTD.',
+   0x00A0FC: u'IMAGE SCIENCES, INC.',
+   0x00A0FD: u'SCITEX DIGITAL PRINTING, INC.',
+   0x00A0FE: u'BOSTON TECHNOLOGY, INC.',
+   0x00A0FF: u'TELLABS OPERATIONS, INC.',
+   0x00AA00: u'INTEL CORPORATION',
+   0x00AA01: u'INTEL CORPORATION',
+   0x00AA02: u'INTEL CORPORATION',
+   0x00AA3C: u'OLIVETTI TELECOM SPA (OLTECO)',
+   0x00B009: u'Grass Valley Group',
+   0x00B017: u'InfoGear Technology Corp.',
+   0x00B019: u'Casi-Rusco',
+   0x00B01C: u'Westport Technologies',
+   0x00B01E: u'Rantic Labs, Inc.',
+   0x00B02A: u'ORSYS GmbH',
+   0x00B02D: u'ViaGate Technologies, Inc.',
+   0x00B03B: u'HiQ Networks',
+   0x00B048: u'Marconi Communications Inc.',
+   0x00B04A: u'Cisco Systems, Inc.',
+   0x00B052: u'Intellon Corporation',
+   0x00B064: u'Cisco Systems, Inc.',
+   0x00B069: u'Honewell Oy',
+   0x00B06D: u'Jones Futurex Inc.',
+   0x00B080: u'Mannesmann Ipulsys B.V.',
+   0x00B086: u'LocSoft Limited',
+   0x00B08E: u'Cisco Systems, Inc.',
+   0x00B091: u'Transmeta Corp.',
+   0x00B094: u'Alaris, Inc.',
+   0x00B09A: u'Morrow Technologies Corp.',
+   0x00B09D: u'Point Grey Research Inc.',
+   0x00B0AC: u'SIAE-Microelettronica S.p.A.',
+   0x00B0AE: u'Symmetricom',
+   0x00B0B3: u'Xstreamis PLC',
+   0x00B0C2: u'Cisco Systems, Inc.',
+   0x00B0C7: u'Tellabs Operations, Inc.',
+   0x00B0CE: u'TECHNOLOGY RESCUE',
+   0x00B0D0: u'Dell Computer Corp.',
+   0x00B0DB: u'Nextcell, Inc.',
+   0x00B0DF: u'Reliable Data Technology, Inc.',
+   0x00B0E7: u'British Federal Ltd.',
+   0x00B0EC: u'EACEM',
+   0x00B0EE: u'Ajile Systems, Inc.',
+   0x00B0F0: u'CALY NETWORKS',
+   0x00B0F5: u'NetWorth Technologies, Inc.',
+   0x00BAC0: u'Biometric Access Company',
+   0x00BB01: u'OCTOTHORPE CORP.',
+   0x00BBF0: u'UNGERMANN-BASS INC.',
+   0x00C000: u'LANOPTICS, LTD.',
+   0x00C001: u'DIATEK PATIENT MANAGMENT',
+   0x00C002: u'SERCOMM CORPORATION',
+   0x00C003: u'GLOBALNET COMMUNICATIONS',
+   0x00C004: u'JAPAN BUSINESS COMPUTER CO.LTD',
+   0x00C005: u'LIVINGSTON ENTERPRISES, INC.',
+   0x00C006: u'NIPPON AVIONICS CO., LTD.',
+   0x00C007: u'PINNACLE DATA SYSTEMS, INC.',
+   0x00C008: u'SECO SRL',
+   0x00C009: u'KT TECHNOLOGY (S) PTE LTD',
+   0x00C00A: u'MICRO CRAFT',
+   0x00C00B: u'NORCONTROL A.S.',
+   0x00C00C: u'RELIA TECHNOLGIES',
+   0x00C00D: u'ADVANCED LOGIC RESEARCH, INC.',
+   0x00C00E: u'PSITECH, INC.',
+   0x00C00F: u'QUANTUM SOFTWARE SYSTEMS LTD.',
+   0x00C010: u'HIRAKAWA HEWTECH CORP.',
+   0x00C011: u'INTERACTIVE COMPUTING DEVICES',
+   0x00C012: u'NETSPAN CORPORATION',
+   0x00C013: u'NETRIX',
+   0x00C014: u'TELEMATICS CALABASAS INT\'L,INC',
+   0x00C015: u'NEW MEDIA CORPORATION',
+   0x00C016: u'ELECTRONIC THEATRE CONTROLS',
+   0x00C017: u'FORTE NETWORKS',
+   0x00C018: u'LANART CORPORATION',
+   0x00C019: u'LEAP TECHNOLOGY, INC.',
+   0x00C01A: u'COROMETRICS MEDICAL SYSTEMS',
+   0x00C01B: u'SOCKET COMMUNICATIONS, INC.',
+   0x00C01C: u'INTERLINK COMMUNICATIONS LTD.',
+   0x00C01D: u'GRAND JUNCTION NETWORKS, INC.',
+   0x00C01E: u'LA FRANCAISE DES JEUX',
+   0x00C01F: u'S.E.R.C.E.L.',
+   0x00C020: u'ARCO ELECTRONIC, CONTROL LTD.',
+   0x00C021: u'NETEXPRESS',
+   0x00C022: u'LASERMASTER TECHNOLOGIES, INC.',
+   0x00C023: u'TUTANKHAMON ELECTRONICS',
+   0x00C024: u'EDEN SISTEMAS DE COMPUTACAO SA',
+   0x00C025: u'DATAPRODUCTS CORPORATION',
+   0x00C026: u'LANS TECHNOLOGY CO., LTD.',
+   0x00C027: u'CIPHER SYSTEMS, INC.',
+   0x00C028: u'JASCO CORPORATION',
+   0x00C029: u'Nexans Deutschland AG - ANS',
+   0x00C02A: u'OHKURA ELECTRIC CO., LTD.',
+   0x00C02B: u'GERLOFF GESELLSCHAFT FUR',
+   0x00C02C: u'CENTRUM COMMUNICATIONS, INC.',
+   0x00C02D: u'FUJI PHOTO FILM CO., LTD.',
+   0x00C02E: u'NETWIZ',
+   0x00C02F: u'OKUMA CORPORATION',
+   0x00C030: u'INTEGRATED ENGINEERING B. V.',
+   0x00C031: u'DESIGN RESEARCH SYSTEMS, INC.',
+   0x00C032: u'I-CUBED LIMITED',
+   0x00C033: u'TELEBIT COMMUNICATIONS APS',
+   0x00C034: u'TRANSACTION NETWORK',
+   0x00C035: u'QUINTAR COMPANY',
+   0x00C036: u'RAYTECH ELECTRONIC CORP.',
+   0x00C037: u'DYNATEM',
+   0x00C038: u'RASTER IMAGE PROCESSING SYSTEM',
+   0x00C039: u'Teridian Semiconductor Corporation',
+   0x00C03A: u'MEN-MIKRO ELEKTRONIK GMBH',
+   0x00C03B: u'MULTIACCESS COMPUTING CORP.',
+   0x00C03C: u'TOWER TECH S.R.L.',
+   0x00C03D: u'WIESEMANN & THEIS GMBH',
+   0x00C03E: u'FA. GEBR. HELLER GMBH',
+   0x00C03F: u'STORES AUTOMATED SYSTEMS, INC.',
+   0x00C040: u'ECCI',
+   0x00C041: u'DIGITAL TRANSMISSION SYSTEMS',
+   0x00C042: u'DATALUX CORP.',
+   0x00C043: u'STRATACOM',
+   0x00C044: u'EMCOM CORPORATION',
+   0x00C045: u'ISOLATION SYSTEMS, LTD.',
+   0x00C046: u'KEMITRON LTD.',
+   0x00C047: u'UNIMICRO SYSTEMS, INC.',
+   0x00C048: u'BAY TECHNICAL ASSOCIATES',
+   0x00C049: u'U.S. ROBOTICS, INC.',
+   0x00C04A: u'GROUP 2000 AG',
+   0x00C04B: u'CREATIVE MICROSYSTEMS',
+   0x00C04C: u'DEPARTMENT OF FOREIGN AFFAIRS',
+   0x00C04D: u'MITEC, INC.',
+   0x00C04E: u'COMTROL CORPORATION',
+   0x00C04F: u'DELL COMPUTER CORPORATION',
+   0x00C050: u'TOYO DENKI SEIZO K.K.',
+   0x00C051: u'ADVANCED INTEGRATION RESEARCH',
+   0x00C052: u'BURR-BROWN',
+   0x00C053: u'Concerto Software',
+   0x00C054: u'NETWORK PERIPHERALS, LTD.',
+   0x00C055: u'MODULAR COMPUTING TECHNOLOGIES',
+   0x00C056: u'SOMELEC',
+   0x00C057: u'MYCO ELECTRONICS',
+   0x00C058: u'DATAEXPERT CORP.',
+   0x00C059: u'NIPPON DENSO CO., LTD.',
+   0x00C05A: u'SEMAPHORE COMMUNICATIONS CORP.',
+   0x00C05B: u'NETWORKS NORTHWEST, INC.',
+   0x00C05C: u'ELONEX PLC',
+   0x00C05D: u'L&N TECHNOLOGIES',
+   0x00C05E: u'VARI-LITE, INC.',
+   0x00C05F: u'FINE-PAL COMPANY LIMITED',
+   0x00C060: u'ID SCANDINAVIA AS',
+   0x00C061: u'SOLECTEK CORPORATION',
+   0x00C062: u'IMPULSE TECHNOLOGY',
+   0x00C063: u'MORNING STAR TECHNOLOGIES, INC',
+   0x00C064: u'GENERAL DATACOMM IND. INC.',
+   0x00C065: u'SCOPE COMMUNICATIONS, INC.',
+   0x00C066: u'DOCUPOINT, INC.',
+   0x00C067: u'UNITED BARCODE INDUSTRIES',
+   0x00C068: u'PHILIP DRAKE ELECTRONICS LTD.',
+   0x00C069: u'Axxcelera Broadband Wireless',
+   0x00C06A: u'ZAHNER-ELEKTRIK GMBH & CO. KG',
+   0x00C06B: u'OSI PLUS CORPORATION',
+   0x00C06C: u'SVEC COMPUTER CORP.',
+   0x00C06D: u'BOCA RESEARCH, INC.',
+   0x00C06E: u'HAFT TECHNOLOGY, INC.',
+   0x00C06F: u'KOMATSU LTD.',
+   0x00C070: u'SECTRA SECURE-TRANSMISSION AB',
+   0x00C071: u'AREANEX COMMUNICATIONS, INC.',
+   0x00C072: u'KNX LTD.',
+   0x00C073: u'XEDIA CORPORATION',
+   0x00C074: u'TOYODA AUTOMATIC LOOM',
+   0x00C075: u'XANTE CORPORATION',
+   0x00C076: u'I-DATA INTERNATIONAL A-S',
+   0x00C077: u'DAEWOO TELECOM LTD.',
+   0x00C078: u'COMPUTER SYSTEMS ENGINEERING',
+   0x00C079: u'FONSYS CO.,LTD.',
+   0x00C07A: u'PRIVA B.V.',
+   0x00C07B: u'ASCEND COMMUNICATIONS, INC.',
+   0x00C07C: u'HIGHTECH INFORMATION',
+   0x00C07D: u'RISC DEVELOPMENTS LTD.',
+   0x00C07E: u'KUBOTA CORPORATION ELECTRONIC',
+   0x00C07F: u'NUPON COMPUTING CORP.',
+   0x00C080: u'NETSTAR, INC.',
+   0x00C081: u'METRODATA LTD.',
+   0x00C082: u'MOORE PRODUCTS CO.',
+   0x00C083: u'TRACE MOUNTAIN PRODUCTS, INC.',
+   0x00C084: u'DATA LINK CORP. LTD.',
+   0x00C085: u'ELECTRONICS FOR IMAGING, INC.',
+   0x00C086: u'THE LYNK CORPORATION',
+   0x00C087: u'UUNET TECHNOLOGIES, INC.',
+   0x00C088: u'EKF ELEKTRONIK GMBH',
+   0x00C089: u'TELINDUS DISTRIBUTION',
+   0x00C08A: u'LAUTERBACH DATENTECHNIK GMBH',
+   0x00C08B: u'RISQ MODULAR SYSTEMS, INC.',
+   0x00C08C: u'PERFORMANCE TECHNOLOGIES, INC.',
+   0x00C08D: u'TRONIX PRODUCT DEVELOPMENT',
+   0x00C08E: u'NETWORK INFORMATION TECHNOLOGY',
+   0x00C08F: u'Matsushita Electric Works, Ltd.',
+   0x00C090: u'PRAIM S.R.L.',
+   0x00C091: u'JABIL CIRCUIT, INC.',
+   0x00C092: u'MENNEN MEDICAL INC.',
+   0x00C093: u'ALTA RESEARCH CORP.',
+   0x00C094: u'VMX INC.',
+   0x00C095: u'ZNYX',
+   0x00C096: u'TAMURA CORPORATION',
+   0x00C097: u'ARCHIPEL SA',
+   0x00C098: u'CHUNTEX ELECTRONIC CO., LTD.',
+   0x00C099: u'YOSHIKI INDUSTRIAL CO.,LTD.',
+   0x00C09A: u'PHOTONICS CORPORATION',
+   0x00C09B: u'RELIANCE COMM/TEC, R-TEC',
+   0x00C09C: u'TOA ELECTRONIC LTD.',
+   0x00C09D: u'DISTRIBUTED SYSTEMS INT\'L, INC',
+   0x00C09E: u'CACHE COMPUTERS, INC.',
+   0x00C09F: u'QUANTA COMPUTER, INC.',
+   0x00C0A0: u'ADVANCE MICRO RESEARCH, INC.',
+   0x00C0A1: u'TOKYO DENSHI SEKEI CO.',
+   0x00C0A2: u'INTERMEDIUM A/S',
+   0x00C0A3: u'DUAL ENTERPRISES CORPORATION',
+   0x00C0A4: u'UNIGRAF OY',
+   0x00C0A5: u'DICKENS DATA SYSTEMS',
+   0x00C0A6: u'EXICOM AUSTRALIA PTY. LTD',
+   0x00C0A7: u'SEEL LTD.',
+   0x00C0A8: u'GVC CORPORATION',
+   0x00C0A9: u'BARRON MCCANN LTD.',
+   0x00C0AA: u'SILICON VALLEY COMPUTER',
+   0x00C0AB: u'Telco Systems, Inc.',
+   0x00C0AC: u'GAMBIT COMPUTER COMMUNICATIONS',
+   0x00C0AD: u'MARBEN COMMUNICATION SYSTEMS',
+   0x00C0AE: u'TOWERCOM CO. INC. DBA PC HOUSE',
+   0x00C0AF: u'TEKLOGIX INC.',
+   0x00C0B0: u'GCC TECHNOLOGIES,INC.',
+   0x00C0B1: u'GENIUS NET CO.',
+   0x00C0B2: u'NORAND CORPORATION',
+   0x00C0B3: u'COMSTAT DATACOMM CORPORATION',
+   0x00C0B4: u'MYSON TECHNOLOGY, INC.',
+   0x00C0B5: u'CORPORATE NETWORK SYSTEMS,INC.',
+   0x00C0B6: u'Adaptec, Inc.',
+   0x00C0B7: u'AMERICAN POWER CONVERSION CORP',
+   0x00C0B8: u'FRASER\'S HILL LTD.',
+   0x00C0B9: u'FUNK SOFTWARE, INC.',
+   0x00C0BA: u'NETVANTAGE',
+   0x00C0BB: u'FORVAL CREATIVE, INC.',
+   0x00C0BC: u'TELECOM AUSTRALIA/CSSC',
+   0x00C0BD: u'INEX TECHNOLOGIES, INC.',
+   0x00C0BE: u'ALCATEL - SEL',
+   0x00C0BF: u'TECHNOLOGY CONCEPTS, LTD.',
+   0x00C0C0: u'SHORE MICROSYSTEMS, INC.',
+   0x00C0C1: u'QUAD/GRAPHICS, INC.',
+   0x00C0C2: u'INFINITE NETWORKS LTD.',
+   0x00C0C3: u'ACUSON COMPUTED SONOGRAPHY',
+   0x00C0C4: u'COMPUTER OPERATIONAL',
+   0x00C0C5: u'SID INFORMATICA',
+   0x00C0C6: u'PERSONAL MEDIA CORP.',
+   0x00C0C7: u'SPARKTRUM MICROSYSTEMS, INC.',
+   0x00C0C8: u'MICRO BYTE PTY. LTD.',
+   0x00C0C9: u'ELSAG BAILEY PROCESS',
+   0x00C0CA: u'ALFA, INC.',
+   0x00C0CB: u'CONTROL TECHNOLOGY CORPORATION',
+   0x00C0CC: u'TELESCIENCES CO SYSTEMS, INC.',
+   0x00C0CD: u'COMELTA, S.A.',
+   0x00C0CE: u'CEI SYSTEMS & ENGINEERING PTE',
+   0x00C0CF: u'IMATRAN VOIMA OY',
+   0x00C0D0: u'RATOC SYSTEM INC.',
+   0x00C0D1: u'COMTREE TECHNOLOGY CORPORATION',
+   0x00C0D2: u'SYNTELLECT, INC.',
+   0x00C0D3: u'OLYMPUS IMAGE SYSTEMS, INC.',
+   0x00C0D4: u'AXON NETWORKS, INC.',
+   0x00C0D5: u'QUANCOM ELECTRONIC GMBH',
+   0x00C0D6: u'J1 SYSTEMS, INC.',
+   0x00C0D7: u'TAIWAN TRADING CENTER DBA',
+   0x00C0D8: u'UNIVERSAL DATA SYSTEMS',
+   0x00C0D9: u'QUINTE NETWORK CONFIDENTIALITY',
+   0x00C0DA: u'NICE SYSTEMS LTD.',
+   0x00C0DB: u'IPC CORPORATION (PTE) LTD.',
+   0x00C0DC: u'EOS TECHNOLOGIES, INC.',
+   0x00C0DD: u'QLogic Corporation',
+   0x00C0DE: u'ZCOMM, INC.',
+   0x00C0DF: u'KYE Systems Corp.',
+   0x00C0E0: u'DSC COMMUNICATION CORP.',
+   0x00C0E1: u'SONIC SOLUTIONS',
+   0x00C0E2: u'CALCOMP, INC.',
+   0x00C0E3: u'OSITECH COMMUNICATIONS, INC.',
+   0x00C0E4: u'SIEMENS BUILDING',
+   0x00C0E5: u'GESPAC, S.A.',
+   0x00C0E6: u'Verilink Corporation',
+   0x00C0E7: u'FIBERDATA AB',
+   0x00C0E8: u'PLEXCOM, INC.',
+   0x00C0E9: u'OAK SOLUTIONS, LTD.',
+   0x00C0EA: u'ARRAY TECHNOLOGY LTD.',
+   0x00C0EB: u'SEH COMPUTERTECHNIK GMBH',
+   0x00C0EC: u'DAUPHIN TECHNOLOGY',
+   0x00C0ED: u'US ARMY ELECTRONIC',
+   0x00C0EE: u'KYOCERA CORPORATION',
+   0x00C0EF: u'ABIT CORPORATION',
+   0x00C0F0: u'KINGSTON TECHNOLOGY CORP.',
+   0x00C0F1: u'SHINKO ELECTRIC CO., LTD.',
+   0x00C0F2: u'TRANSITION NETWORKS',
+   0x00C0F3: u'NETWORK COMMUNICATIONS CORP.',
+   0x00C0F4: u'INTERLINK SYSTEM CO., LTD.',
+   0x00C0F5: u'METACOMP, INC.',
+   0x00C0F6: u'CELAN TECHNOLOGY INC.',
+   0x00C0F7: u'ENGAGE COMMUNICATION, INC.',
+   0x00C0F8: u'ABOUT COMPUTING INC.',
+   0x00C0F9: u'Motorola Embedded Computing Group',
+   0x00C0FA: u'CANARY COMMUNICATIONS, INC.',
+   0x00C0FB: u'ADVANCED TECHNOLOGY LABS',
+   0x00C0FC: u'ELASTIC REALITY, INC.',
+   0x00C0FD: u'PROSUM',
+   0x00C0FE: u'APTEC COMPUTER SYSTEMS, INC.',
+   0x00C0FF: u'DOT HILL SYSTEMS CORPORATION',
+   0x00CBBD: u'Cambridge Broadband Ltd.',
+   0x00CF1C: u'COMMUNICATION MACHINERY CORP.',
+   0x00D000: u'FERRAN SCIENTIFIC, INC.',
+   0x00D001: u'VST TECHNOLOGIES, INC.',
+   0x00D002: u'DITECH CORPORATION',
+   0x00D003: u'COMDA ENTERPRISES CORP.',
+   0x00D004: u'PENTACOM LTD.',
+   0x00D005: u'ZHS ZEITMANAGEMENTSYSTEME',
+   0x00D006: u'CISCO SYSTEMS, INC.',
+   0x00D007: u'MIC ASSOCIATES, INC.',
+   0x00D008: u'MACTELL CORPORATION',
+   0x00D009: u'HSING TECH. ENTERPRISE CO. LTD',
+   0x00D00A: u'LANACCESS TELECOM S.A.',
+   0x00D00B: u'RHK TECHNOLOGY, INC.',
+   0x00D00C: u'SNIJDER MICRO SYSTEMS',
+   0x00D00D: u'MICROMERITICS INSTRUMENT',
+   0x00D00E: u'PLURIS, INC.',
+   0x00D00F: u'SPEECH DESIGN GMBH',
+   0x00D010: u'CONVERGENT NETWORKS, INC.',
+   0x00D011: u'PRISM VIDEO, INC.',
+   0x00D012: u'GATEWORKS CORP.',
+   0x00D013: u'PRIMEX AEROSPACE COMPANY',
+   0x00D014: u'ROOT, INC.',
+   0x00D015: u'UNIVEX MICROTECHNOLOGY CORP.',
+   0x00D016: u'SCM MICROSYSTEMS, INC.',
+   0x00D017: u'SYNTECH INFORMATION CO., LTD.',
+   0x00D018: u'QWES. COM, INC.',
+   0x00D019: u'DAINIPPON SCREEN CORPORATE',
+   0x00D01A: u'URMET  TLC S.P.A.',
+   0x00D01B: u'MIMAKI ENGINEERING CO., LTD.',
+   0x00D01C: u'SBS TECHNOLOGIES,',
+   0x00D01D: u'FURUNO ELECTRIC CO., LTD.',
+   0x00D01E: u'PINGTEL CORP.',
+   0x00D01F: u'CTAM PTY. LTD.',
+   0x00D020: u'AIM SYSTEM, INC.',
+   0x00D021: u'REGENT ELECTRONICS CORP.',
+   0x00D022: u'INCREDIBLE TECHNOLOGIES, INC.',
+   0x00D023: u'INFORTREND TECHNOLOGY, INC.',
+   0x00D024: u'Cognex Corporation',
+   0x00D025: u'XROSSTECH, INC.',
+   0x00D026: u'HIRSCHMANN AUSTRIA GMBH',
+   0x00D027: u'APPLIED AUTOMATION, INC.',
+   0x00D028: u'OMNEON VIDEO NETWORKS',
+   0x00D029: u'WAKEFERN FOOD CORPORATION',
+   0x00D02A: u'Voxent Systems Ltd.',
+   0x00D02B: u'JETCELL, INC.',
+   0x00D02C: u'CAMPBELL SCIENTIFIC, INC.',
+   0x00D02D: u'ADEMCO',
+   0x00D02E: u'COMMUNICATION AUTOMATION CORP.',
+   0x00D02F: u'VLSI TECHNOLOGY INC.',
+   0x00D030: u'SAFETRAN SYSTEMS CORP.',
+   0x00D031: u'INDUSTRIAL LOGIC CORPORATION',
+   0x00D032: u'YANO ELECTRIC CO., LTD.',
+   0x00D033: u'DALIAN DAXIAN NETWORK',
+   0x00D034: u'ORMEC SYSTEMS CORP.',
+   0x00D035: u'BEHAVIOR TECH. COMPUTER CORP.',
+   0x00D036: u'TECHNOLOGY ATLANTA CORP.',
+   0x00D037: u'PHILIPS-DVS-LO BDR',
+   0x00D038: u'FIVEMERE, LTD.',
+   0x00D039: u'UTILICOM, INC.',
+   0x00D03A: u'ZONEWORX, INC.',
+   0x00D03B: u'VISION PRODUCTS PTY. LTD.',
+   0x00D03C: u'Vieo, Inc.',
+   0x00D03D: u'GALILEO TECHNOLOGY, LTD.',
+   0x00D03E: u'ROCKETCHIPS, INC.',
+   0x00D03F: u'AMERICAN COMMUNICATION',
+   0x00D040: u'SYSMATE CO., LTD.',
+   0x00D041: u'AMIGO TECHNOLOGY CO., LTD.',
+   0x00D042: u'MAHLO GMBH & CO. UG',
+   0x00D043: u'ZONAL RETAIL DATA SYSTEMS',
+   0x00D044: u'ALIDIAN NETWORKS, INC.',
+   0x00D045: u'KVASER AB',
+   0x00D046: u'DOLBY LABORATORIES, INC.',
+   0x00D047: u'XN TECHNOLOGIES',
+   0x00D048: u'ECTON, INC.',
+   0x00D049: u'IMPRESSTEK CO., LTD.',
+   0x00D04A: u'PRESENCE TECHNOLOGY GMBH',
+   0x00D04B: u'LA CIE GROUP S.A.',
+   0x00D04C: u'EUROTEL TELECOM LTD.',
+   0x00D04D: u'DIV OF RESEARCH & STATISTICS',
+   0x00D04E: u'LOGIBAG',
+   0x00D04F: u'BITRONICS, INC.',
+   0x00D050: u'ISKRATEL',
+   0x00D051: u'O2 MICRO, INC.',
+   0x00D052: u'ASCEND COMMUNICATIONS, INC.',
+   0x00D053: u'CONNECTED SYSTEMS',
+   0x00D054: u'SAS INSTITUTE INC.',
+   0x00D055: u'KATHREIN-WERKE KG',
+   0x00D056: u'SOMAT CORPORATION',
+   0x00D057: u'ULTRAK, INC.',
+   0x00D058: u'CISCO SYSTEMS, INC.',
+   0x00D059: u'AMBIT MICROSYSTEMS CORP.',
+   0x00D05A: u'SYMBIONICS, LTD.',
+   0x00D05B: u'ACROLOOP MOTION CONTROL',
+   0x00D05C: u'TECHNOTREND SYSTEMTECHNIK GMBH',
+   0x00D05D: u'INTELLIWORXX, INC.',
+   0x00D05E: u'STRATABEAM TECHNOLOGY, INC.',
+   0x00D05F: u'VALCOM, INC.',
+   0x00D060: u'PANASONIC EUROPEAN',
+   0x00D061: u'TREMON ENTERPRISES CO., LTD.',
+   0x00D062: u'DIGIGRAM',
+   0x00D063: u'CISCO SYSTEMS, INC.',
+   0x00D064: u'MULTITEL',
+   0x00D065: u'TOKO ELECTRIC',
+   0x00D066: u'WINTRISS ENGINEERING CORP.',
+   0x00D067: u'CAMPIO COMMUNICATIONS',
+   0x00D068: u'IWILL CORPORATION',
+   0x00D069: u'TECHNOLOGIC SYSTEMS',
+   0x00D06A: u'LINKUP SYSTEMS CORPORATION',
+   0x00D06B: u'SR TELECOM INC.',
+   0x00D06C: u'SHAREWAVE, INC.',
+   0x00D06D: u'ACRISON, INC.',
+   0x00D06E: u'TRENDVIEW RECORDERS LTD.',
+   0x00D06F: u'KMC CONTROLS',
+   0x00D070: u'LONG WELL ELECTRONICS CORP.',
+   0x00D071: u'ECHELON CORP.',
+   0x00D072: u'BROADLOGIC',
+   0x00D073: u'ACN ADVANCED COMMUNICATIONS',
+   0x00D074: u'TAQUA SYSTEMS, INC.',
+   0x00D075: u'ALARIS MEDICAL SYSTEMS, INC.',
+   0x00D076: u'Merrill Lynch & Co., Inc.',
+   0x00D077: u'LUCENT TECHNOLOGIES',
+   0x00D078: u'ELTEX OF SWEDEN AB',
+   0x00D079: u'CISCO SYSTEMS, INC.',
+   0x00D07A: u'AMAQUEST COMPUTER CORP.',
+   0x00D07B: u'COMCAM INTERNATIONAL LTD.',
+   0x00D07C: u'KOYO ELECTRONICS INC. CO.,LTD.',
+   0x00D07D: u'COSINE COMMUNICATIONS',
+   0x00D07E: u'KEYCORP LTD.',
+   0x00D07F: u'STRATEGY & TECHNOLOGY, LIMITED',
+   0x00D080: u'EXABYTE CORPORATION',
+   0x00D081: u'REAL TIME DEVICES USA, INC.',
+   0x00D082: u'IOWAVE INC.',
+   0x00D083: u'INVERTEX, INC.',
+   0x00D084: u'NEXCOMM SYSTEMS, INC.',
+   0x00D085: u'OTIS ELEVATOR COMPANY',
+   0x00D086: u'FOVEON, INC.',
+   0x00D087: u'MICROFIRST INC.',
+   0x00D088: u'Terayon Communications Systems',
+   0x00D089: u'DYNACOLOR, INC.',
+   0x00D08A: u'PHOTRON USA',
+   0x00D08B: u'ADVA Limited',
+   0x00D08C: u'GENOA TECHNOLOGY, INC.',
+   0x00D08D: u'PHOENIX GROUP, INC.',
+   0x00D08E: u'NVISION INC.',
+   0x00D08F: u'ARDENT TECHNOLOGIES, INC.',
+   0x00D090: u'CISCO SYSTEMS, INC.',
+   0x00D091: u'SMARTSAN SYSTEMS, INC.',
+   0x00D092: u'GLENAYRE WESTERN MULTIPLEX',
+   0x00D093: u'TQ - COMPONENTS GMBH',
+   0x00D094: u'TIMELINE VISTA, INC.',
+   0x00D095: u'Alcatel North America ESD',
+   0x00D096: u'3COM EUROPE LTD.',
+   0x00D097: u'CISCO SYSTEMS, INC.',
+   0x00D098: u'Photon Dynamics Canada Inc.',
+   0x00D099: u'ELCARD OY',
+   0x00D09A: u'FILANET CORPORATION',
+   0x00D09B: u'SPECTEL LTD.',
+   0x00D09C: u'KAPADIA COMMUNICATIONS',
+   0x00D09D: u'VERIS INDUSTRIES',
+   0x00D09E: u'2WIRE, INC.',
+   0x00D09F: u'NOVTEK TEST SYSTEMS',
+   0x00D0A0: u'MIPS DENMARK',
+   0x00D0A1: u'OSKAR VIERLING GMBH + CO. KG',
+   0x00D0A2: u'INTEGRATED DEVICE',
+   0x00D0A3: u'VOCAL DATA, INC.',
+   0x00D0A4: u'ALANTRO COMMUNICATIONS',
+   0x00D0A5: u'AMERICAN ARIUM',
+   0x00D0A6: u'LANBIRD TECHNOLOGY CO., LTD.',
+   0x00D0A7: u'TOKYO SOKKI KENKYUJO CO., LTD.',
+   0x00D0A8: u'NETWORK ENGINES, INC.',
+   0x00D0A9: u'SHINANO KENSHI CO., LTD.',
+   0x00D0AA: u'CHASE COMMUNICATIONS',
+   0x00D0AB: u'DELTAKABEL TELECOM CV',
+   0x00D0AC: u'GRAYSON WIRELESS',
+   0x00D0AD: u'TL INDUSTRIES',
+   0x00D0AE: u'ORESIS COMMUNICATIONS, INC.',
+   0x00D0AF: u'CUTLER-HAMMER, INC.',
+   0x00D0B0: u'BITSWITCH LTD.',
+   0x00D0B1: u'OMEGA ELECTRONICS SA',
+   0x00D0B2: u'XIOTECH CORPORATION',
+   0x00D0B3: u'DRS FLIGHT SAFETY AND',
+   0x00D0B4: u'KATSUJIMA CO., LTD.',
+   0x00D0B5: u'IPricot formerly DotCom',
+   0x00D0B6: u'CRESCENT NETWORKS, INC.',
+   0x00D0B7: u'INTEL CORPORATION',
+   0x00D0B8: u'Iomega Corporation',
+   0x00D0B9: u'MICROTEK INTERNATIONAL, INC.',
+   0x00D0BA: u'CISCO SYSTEMS, INC.',
+   0x00D0BB: u'CISCO SYSTEMS, INC.',
+   0x00D0BC: u'CISCO SYSTEMS, INC.',
+   0x00D0BD: u'SICAN GMBH',
+   0x00D0BE: u'EMUTEC INC.',
+   0x00D0BF: u'PIVOTAL TECHNOLOGIES',
+   0x00D0C0: u'CISCO SYSTEMS, INC.',
+   0x00D0C1: u'HARMONIC DATA SYSTEMS, LTD.',
+   0x00D0C2: u'BALTHAZAR TECHNOLOGY AB',
+   0x00D0C3: u'VIVID TECHNOLOGY PTE, LTD.',
+   0x00D0C4: u'TERATECH CORPORATION',
+   0x00D0C5: u'COMPUTATIONAL SYSTEMS, INC.',
+   0x00D0C6: u'THOMAS & BETTS CORP.',
+   0x00D0C7: u'PATHWAY, INC.',
+   0x00D0C8: u'I/O CONSULTING A/S',
+   0x00D0C9: u'ADVANTECH CO., LTD.',
+   0x00D0CA: u'INTRINSYC SOFTWARE INC.',
+   0x00D0CB: u'DASAN CO., LTD.',
+   0x00D0CC: u'TECHNOLOGIES LYRE INC.',
+   0x00D0CD: u'ATAN TECHNOLOGY INC.',
+   0x00D0CE: u'ASYST ELECTRONIC',
+   0x00D0CF: u'MORETON BAY',
+   0x00D0D0: u'ZHONGXING TELECOM LTD.',
+   0x00D0D1: u'SIROCCO SYSTEMS, INC.',
+   0x00D0D2: u'EPILOG CORPORATION',
+   0x00D0D3: u'CISCO SYSTEMS, INC.',
+   0x00D0D4: u'V-BITS, INC.',
+   0x00D0D5: u'GRUNDIG AG',
+   0x00D0D6: u'AETHRA TELECOMUNICAZIONI',
+   0x00D0D7: u'B2C2, INC.',
+   0x00D0D8: u'3Com Corporation',
+   0x00D0D9: u'DEDICATED MICROCOMPUTERS',
+   0x00D0DA: u'TAICOM DATA SYSTEMS CO., LTD.',
+   0x00D0DB: u'MCQUAY INTERNATIONAL',
+   0x00D0DC: u'MODULAR MINING SYSTEMS, INC.',
+   0x00D0DD: u'SUNRISE TELECOM, INC.',
+   0x00D0DE: u'PHILIPS MULTIMEDIA NETWORK',
+   0x00D0DF: u'KUZUMI ELECTRONICS, INC.',
+   0x00D0E0: u'DOOIN ELECTRONICS CO.',
+   0x00D0E1: u'AVIONITEK ISRAEL INC.',
+   0x00D0E2: u'MRT MICRO, INC.',
+   0x00D0E3: u'ELE-CHEM ENGINEERING CO., LTD.',
+   0x00D0E4: u'CISCO SYSTEMS, INC.',
+   0x00D0E5: u'SOLIDUM SYSTEMS CORP.',
+   0x00D0E6: u'IBOND INC.',
+   0x00D0E7: u'VCON TELECOMMUNICATION LTD.',
+   0x00D0E8: u'MAC SYSTEM CO., LTD.',
+   0x00D0E9: u'ADVANTAGE CENTURY',
+   0x00D0EA: u'NEXTONE COMMUNICATIONS, INC.',
+   0x00D0EB: u'LIGHTERA NETWORKS, INC.',
+   0x00D0EC: u'NAKAYO TELECOMMUNICATIONS, INC',
+   0x00D0ED: u'XIOX',
+   0x00D0EE: u'DICTAPHONE CORPORATION',
+   0x00D0EF: u'IGT',
+   0x00D0F0: u'CONVISION TECHNOLOGY GMBH',
+   0x00D0F1: u'SEGA ENTERPRISES, LTD.',
+   0x00D0F2: u'MONTEREY NETWORKS',
+   0x00D0F3: u'SOLARI DI UDINE SPA',
+   0x00D0F4: u'CARINTHIAN TECH INSTITUTE',
+   0x00D0F5: u'ORANGE MICRO, INC.',
+   0x00D0F6: u'Alcatel Canada',
+   0x00D0F7: u'NEXT NETS CORPORATION',
+   0x00D0F8: u'FUJIAN STAR TERMINAL',
+   0x00D0F9: u'ACUTE COMMUNICATIONS CORP.',
+   0x00D0FA: u'RACAL GUARDATA',
+   0x00D0FB: u'TEK MICROSYSTEMS, INCORPORATED',
+   0x00D0FC: u'GRANITE MICROSYSTEMS',
+   0x00D0FD: u'OPTIMA TELE.COM, INC.',
+   0x00D0FE: u'ASTRAL POINT',
+   0x00D0FF: u'CISCO SYSTEMS, INC.',
+   0x00DD00: u'UNGERMANN-BASS INC.',
+   0x00DD01: u'UNGERMANN-BASS INC.',
+   0x00DD02: u'UNGERMANN-BASS INC.',
+   0x00DD03: u'UNGERMANN-BASS INC.',
+   0x00DD04: u'UNGERMANN-BASS INC.',
+   0x00DD05: u'UNGERMANN-BASS INC.',
+   0x00DD06: u'UNGERMANN-BASS INC.',
+   0x00DD07: u'UNGERMANN-BASS INC.',
+   0x00DD08: u'UNGERMANN-BASS INC.',
+   0x00DD09: u'UNGERMANN-BASS INC.',
+   0x00DD0A: u'UNGERMANN-BASS INC.',
+   0x00DD0B: u'UNGERMANN-BASS INC.',
+   0x00DD0C: u'UNGERMANN-BASS INC.',
+   0x00DD0D: u'UNGERMANN-BASS INC.',
+   0x00DD0E: u'UNGERMANN-BASS INC.',
+   0x00DD0F: u'UNGERMANN-BASS INC.',
+   0x00E000: u'FUJITSU, LTD',
+   0x00E001: u'STRAND LIGHTING LIMITED',
+   0x00E002: u'CROSSROADS SYSTEMS, INC.',
+   0x00E003: u'NOKIA WIRELESS BUSINESS COMMUN',
+   0x00E004: u'PMC-SIERRA, INC.',
+   0x00E005: u'TECHNICAL CORP.',
+   0x00E006: u'SILICON INTEGRATED SYS. CORP.',
+   0x00E007: u'NETWORK ALCHEMY LTD.',
+   0x00E008: u'AMAZING CONTROLS! INC.',
+   0x00E009: u'MARATHON TECHNOLOGIES CORP.',
+   0x00E00A: u'DIBA, INC.',
+   0x00E00B: u'ROOFTOP COMMUNICATIONS CORP.',
+   0x00E00C: u'MOTOROLA',
+   0x00E00D: u'RADIANT SYSTEMS',
+   0x00E00E: u'AVALON IMAGING SYSTEMS, INC.',
+   0x00E00F: u'SHANGHAI BAUD DATA',
+   0x00E010: u'HESS SB-AUTOMATENBAU GmbH',
+   0x00E011: u'UNIDEN SAN DIEGO R&D CENTER, INC.',
+   0x00E012: u'PLUTO TECHNOLOGIES INTERNATIONAL INC.',
+   0x00E013: u'EASTERN ELECTRONIC CO., LTD.',
+   0x00E014: u'CISCO SYSTEMS, INC.',
+   0x00E015: u'HEIWA CORPORATION',
+   0x00E016: u'RAPID CITY COMMUNICATIONS',
+   0x00E017: u'EXXACT GmbH',
+   0x00E018: u'ASUSTEK COMPUTER INC.',
+   0x00E019: u'ING. GIORDANO ELETTRONICA',
+   0x00E01A: u'COMTEC SYSTEMS. CO., LTD.',
+   0x00E01B: u'SPHERE COMMUNICATIONS, INC.',
+   0x00E01C: u'MOBILITY ELECTRONICSY',
+   0x00E01D: u'WebTV NETWORKS, INC.',
+   0x00E01E: u'CISCO SYSTEMS, INC.',
+   0x00E01F: u'AVIDIA Systems, Inc.',
+   0x00E020: u'TECNOMEN OY',
+   0x00E021: u'FREEGATE CORP.',
+   0x00E022: u'Analog Devices Inc.',
+   0x00E023: u'TELRAD',
+   0x00E024: u'GADZOOX NETWORKS',
+   0x00E025: u'dit CO., LTD.',
+   0x00E026: u'Redlake MASD LLC',
+   0x00E027: u'DUX, INC.',
+   0x00E028: u'APTIX CORPORATION',
+   0x00E029: u'STANDARD MICROSYSTEMS CORP.',
+   0x00E02A: u'TANDBERG TELEVISION AS',
+   0x00E02B: u'EXTREME NETWORKS',
+   0x00E02C: u'AST COMPUTER',
+   0x00E02D: u'InnoMediaLogic, Inc.',
+   0x00E02E: u'SPC ELECTRONICS CORPORATION',
+   0x00E02F: u'MCNS HOLDINGS, L.P.',
+   0x00E030: u'MELITA INTERNATIONAL CORP.',
+   0x00E031: u'HAGIWARA ELECTRIC CO., LTD.',
+   0x00E032: u'MISYS FINANCIAL SYSTEMS, LTD.',
+   0x00E033: u'E.E.P.D. GmbH',
+   0x00E034: u'CISCO SYSTEMS, INC.',
+   0x00E035: u'LOUGHBOROUGH SOUND IMAGES, PLC',
+   0x00E036: u'PIONEER CORPORATION',
+   0x00E037: u'CENTURY CORPORATION',
+   0x00E038: u'PROXIMA CORPORATION',
+   0x00E039: u'PARADYNE CORP.',
+   0x00E03A: u'CABLETRON SYSTEMS, INC.',
+   0x00E03B: u'PROMINET CORPORATION',
+   0x00E03C: u'AdvanSys',
+   0x00E03D: u'FOCON ELECTRONIC SYSTEMS A/S',
+   0x00E03E: u'ALFATECH, INC.',
+   0x00E03F: u'JATON CORPORATION',
+   0x00E040: u'DeskStation Technology, Inc.',
+   0x00E041: u'CSPI',
+   0x00E042: u'Pacom Systems Ltd.',
+   0x00E043: u'VitalCom',
+   0x00E044: u'LSICS CORPORATION',
+   0x00E045: u'TOUCHWAVE, INC.',
+   0x00E046: u'BENTLY NEVADA CORP.',
+   0x00E047: u'INFOCUS SYSTEMS',
+   0x00E048: u'SDL COMMUNICATIONS, INC.',
+   0x00E049: u'MICROWI ELECTRONIC GmbH',
+   0x00E04A: u'ENHANCED MESSAGING SYSTEMS, INC',
+   0x00E04B: u'JUMP INDUSTRIELLE COMPUTERTECHNIK GmbH',
+   0x00E04C: u'REALTEK SEMICONDUCTOR CORP.',
+   0x00E04D: u'INTERNET INITIATIVE JAPAN, INC',
+   0x00E04E: u'SANYO DENKI CO., LTD.',
+   0x00E04F: u'CISCO SYSTEMS, INC.',
+   0x00E050: u'EXECUTONE INFORMATION SYSTEMS, INC.',
+   0x00E051: u'TALX CORPORATION',
+   0x00E052: u'FOUNDRY NETWORKS, INC.',
+   0x00E053: u'CELLPORT LABS, INC.',
+   0x00E054: u'KODAI HITEC CO., LTD.',
+   0x00E055: u'INGENIERIA ELECTRONICA COMERCIAL INELCOM S.A.',
+   0x00E056: u'HOLONTECH CORPORATION',
+   0x00E057: u'HAN MICROTELECOM. CO., LTD.',
+   0x00E058: u'PHASE ONE DENMARK A/S',
+   0x00E059: u'CONTROLLED ENVIRONMENTS, LTD.',
+   0x00E05A: u'GALEA NETWORK SECURITY',
+   0x00E05B: u'WEST END SYSTEMS CORP.',
+   0x00E05C: u'MATSUSHITA KOTOBUKI ELECTRONICS INDUSTRIES, LTD.',
+   0x00E05D: u'UNITEC CO., LTD.',
+   0x00E05E: u'JAPAN AVIATION ELECTRONICS INDUSTRY, LTD.',
+   0x00E05F: u'e-Net, Inc.',
+   0x00E060: u'SHERWOOD',
+   0x00E061: u'EdgePoint Networks, Inc.',
+   0x00E062: u'HOST ENGINEERING',
+   0x00E063: u'CABLETRON - YAGO SYSTEMS, INC.',
+   0x00E064: u'SAMSUNG ELECTRONICS',
+   0x00E065: u'OPTICAL ACCESS INTERNATIONAL',
+   0x00E066: u'ProMax Systems, Inc.',
+   0x00E067: u'eac AUTOMATION-CONSULTING GmbH',
+   0x00E068: u'MERRIMAC SYSTEMS INC.',
+   0x00E069: u'JAYCOR',
+   0x00E06A: u'KAPSCH AG',
+   0x00E06B: u'W&G SPECIAL PRODUCTS',
+   0x00E06C: u'AEP Systems International Ltd',
+   0x00E06D: u'COMPUWARE CORPORATION',
+   0x00E06E: u'FAR SYSTEMS S.p.A.',
+   0x00E06F: u'Terayon Communications Systems',
+   0x00E070: u'DH TECHNOLOGY',
+   0x00E071: u'EPIS MICROCOMPUTER',
+   0x00E072: u'LYNK',
+   0x00E073: u'NATIONAL AMUSEMENT NETWORK, INC.',
+   0x00E074: u'TIERNAN COMMUNICATIONS, INC.',
+   0x00E075: u'Verilink Corporation',
+   0x00E076: u'DEVELOPMENT CONCEPTS, INC.',
+   0x00E077: u'WEBGEAR, INC.',
+   0x00E078: u'BERKELEY NETWORKS',
+   0x00E079: u'A.T.N.R.',
+   0x00E07A: u'MIKRODIDAKT AB',
+   0x00E07B: u'BAY NETWORKS',
+   0x00E07C: u'METTLER-TOLEDO, INC.',
+   0x00E07D: u'NETRONIX, INC.',
+   0x00E07E: u'WALT DISNEY IMAGINEERING',
+   0x00E07F: u'LOGISTISTEM s.r.l.',
+   0x00E080: u'CONTROL RESOURCES CORPORATION',
+   0x00E081: u'TYAN COMPUTER CORP.',
+   0x00E082: u'ANERMA',
+   0x00E083: u'JATO TECHNOLOGIES, INC.',
+   0x00E084: u'COMPULITE R&D',
+   0x00E085: u'GLOBAL MAINTECH, INC.',
+   0x00E086: u'CYBEX COMPUTER PRODUCTS',
+   0x00E087: u'LeCroy - Networking Productions Division',
+   0x00E088: u'LTX CORPORATION',
+   0x00E089: u'ION Networks, Inc.',
+   0x00E08A: u'GEC AVERY, LTD.',
+   0x00E08B: u'QLogic Corp.',
+   0x00E08C: u'NEOPARADIGM LABS, INC.',
+   0x00E08D: u'PRESSURE SYSTEMS, INC.',
+   0x00E08E: u'UTSTARCOM',
+   0x00E08F: u'CISCO SYSTEMS, INC.',
+   0x00E090: u'BECKMAN LAB. AUTOMATION DIV.',
+   0x00E091: u'LG ELECTRONICS, INC.',
+   0x00E092: u'ADMTEK INCORPORATED',
+   0x00E093: u'ACKFIN NETWORKS',
+   0x00E094: u'OSAI SRL',
+   0x00E095: u'ADVANCED-VISION TECHNOLGIES CORP.',
+   0x00E096: u'SHIMADZU CORPORATION',
+   0x00E097: u'CARRIER ACCESS CORPORATION',
+   0x00E098: u'AboCom Systems, Inc.',
+   0x00E099: u'SAMSON AG',
+   0x00E09A: u'POSITRON INDUSTRIES, INC.',
+   0x00E09B: u'ENGAGE NETWORKS, INC.',
+   0x00E09C: u'MII',
+   0x00E09D: u'SARNOFF CORPORATION',
+   0x00E09E: u'QUANTUM CORPORATION',
+   0x00E09F: u'PIXEL VISION',
+   0x00E0A0: u'WILTRON CO.',
+   0x00E0A1: u'HIMA PAUL HILDEBRANDT GmbH Co. KG',
+   0x00E0A2: u'MICROSLATE INC.',
+   0x00E0A3: u'CISCO SYSTEMS, INC.',
+   0x00E0A4: u'ESAOTE S.p.A.',
+   0x00E0A5: u'ComCore Semiconductor, Inc.',
+   0x00E0A6: u'TELOGY NETWORKS, INC.',
+   0x00E0A7: u'IPC INFORMATION SYSTEMS, INC.',
+   0x00E0A8: u'SAT GmbH & Co.',
+   0x00E0A9: u'FUNAI ELECTRIC CO., LTD.',
+   0x00E0AA: u'ELECTROSONIC LTD.',
+   0x00E0AB: u'DIMAT S.A.',
+   0x00E0AC: u'MIDSCO, INC.',
+   0x00E0AD: u'EES TECHNOLOGY, LTD.',
+   0x00E0AE: u'XAQTI CORPORATION',
+   0x00E0AF: u'GENERAL DYNAMICS INFORMATION SYSTEMS',
+   0x00E0B0: u'CISCO SYSTEMS, INC.',
+   0x00E0B1: u'Alcatel North America ESD',
+   0x00E0B2: u'TELMAX COMMUNICATIONS CORP.',
+   0x00E0B3: u'EtherWAN Systems, Inc.',
+   0x00E0B4: u'TECHNO SCOPE CO., LTD.',
+   0x00E0B5: u'ARDENT COMMUNICATIONS CORP.',
+   0x00E0B6: u'Entrada Networks',
+   0x00E0B7: u'PI GROUP, LTD.',
+   0x00E0B8: u'GATEWAY 2000',
+   0x00E0B9: u'BYAS SYSTEMS',
+   0x00E0BA: u'BERGHOF AUTOMATIONSTECHNIK GmbH',
+   0x00E0BB: u'NBX CORPORATION',
+   0x00E0BC: u'SYMON COMMUNICATIONS, INC.',
+   0x00E0BD: u'INTERFACE SYSTEMS, INC.',
+   0x00E0BE: u'GENROCO INTERNATIONAL, INC.',
+   0x00E0BF: u'TORRENT NETWORKING TECHNOLOGIES CORP.',
+   0x00E0C0: u'SEIWA ELECTRIC MFG. CO., LTD.',
+   0x00E0C1: u'MEMOREX TELEX JAPAN, LTD.',
+   0x00E0C2: u'NECSY S.p.A.',
+   0x00E0C3: u'SAKAI SYSTEM DEVELOPMENT CORP.',
+   0x00E0C4: u'HORNER ELECTRIC, INC.',
+   0x00E0C5: u'BCOM ELECTRONICS INC.',
+   0x00E0C6: u'LINK2IT, L.L.C.',
+   0x00E0C7: u'EUROTECH SRL',
+   0x00E0C8: u'VIRTUAL ACCESS, LTD.',
+   0x00E0C9: u'AutomatedLogic Corporation',
+   0x00E0CA: u'BEST DATA PRODUCTS',
+   0x00E0CB: u'RESON, INC.',
+   0x00E0CC: u'HERO SYSTEMS, LTD.',
+   0x00E0CD: u'SENSIS CORPORATION',
+   0x00E0CE: u'ARN',
+   0x00E0CF: u'INTEGRATED DEVICE TECHNOLOGY, INC.',
+   0x00E0D0: u'NETSPEED, INC.',
+   0x00E0D1: u'TELSIS LIMITED',
+   0x00E0D2: u'VERSANET COMMUNICATIONS, INC.',
+   0x00E0D3: u'DATENTECHNIK GmbH',
+   0x00E0D4: u'EXCELLENT COMPUTER',
+   0x00E0D5: u'ARCXEL TECHNOLOGIES, INC.',
+   0x00E0D6: u'COMPUTER & COMMUNICATION RESEARCH LAB.',
+   0x00E0D7: u'SUNSHINE ELECTRONICS, INC.',
+   0x00E0D8: u'LANBit Computer, Inc.',
+   0x00E0D9: u'TAZMO CO., LTD.',
+   0x00E0DA: u'Alcatel North America ESD',
+   0x00E0DB: u'ViaVideo Communications, Inc.',
+   0x00E0DC: u'NEXWARE CORP.',
+   0x00E0DD: u'ZENITH ELECTRONICS CORPORATION',
+   0x00E0DE: u'DATAX NV',
+   0x00E0DF: u'KE KOMMUNIKATIONS-ELECTRONIK',
+   0x00E0E0: u'SI ELECTRONICS, LTD.',
+   0x00E0E1: u'G2 NETWORKS, INC.',
+   0x00E0E2: u'INNOVA CORP.',
+   0x00E0E3: u'SK-ELEKTRONIK GmbH',
+   0x00E0E4: u'FANUC ROBOTICS NORTH AMERICA, Inc.',
+   0x00E0E5: u'CINCO NETWORKS, INC.',
+   0x00E0E6: u'INCAA DATACOM B.V.',
+   0x00E0E7: u'RAYTHEON E-SYSTEMS, INC.',
+   0x00E0E8: u'GRETACODER Data Systems AG',
+   0x00E0E9: u'DATA LABS, INC.',
+   0x00E0EA: u'INNOVAT COMMUNICATIONS, INC.',
+   0x00E0EB: u'DIGICOM SYSTEMS, INCORPORATED',
+   0x00E0EC: u'CELESTICA INC.',
+   0x00E0ED: u'SILICOM, LTD.',
+   0x00E0EE: u'MAREL HF',
+   0x00E0EF: u'DIONEX',
+   0x00E0F0: u'ABLER TECHNOLOGY, INC.',
+   0x00E0F1: u'THAT CORPORATION',
+   0x00E0F2: u'ARLOTTO COMNET, INC.',
+   0x00E0F3: u'WebSprint Communications, Inc.',
+   0x00E0F4: u'INSIDE Technology A/S',
+   0x00E0F5: u'TELES AG',
+   0x00E0F6: u'DECISION EUROPE',
+   0x00E0F7: u'CISCO SYSTEMS, INC.',
+   0x00E0F8: u'DICNA CONTROL AB',
+   0x00E0F9: u'CISCO SYSTEMS, INC.',
+   0x00E0FA: u'TRL TECHNOLOGY, LTD.',
+   0x00E0FB: u'LEIGHTRONIX, INC.',
+   0x00E0FC: u'HUAWEI TECHNOLOGIES CO., LTD.',
+   0x00E0FD: u'A-TREND TECHNOLOGY CO., LTD.',
+   0x00E0FE: u'CISCO SYSTEMS, INC.',
+   0x00E0FF: u'SECURITY DYNAMICS TECHNOLOGIES, Inc.',
+   0x00E6D3: u'NIXDORF COMPUTER CORP.',
+   0x020701: u'RACAL-DATACOM',
+   0x021C7C: u'PERQ SYSTEMS CORPORATION',
+   0x026086: u'LOGIC REPLACEMENT TECH. LTD.',
+   0x02608C: u'3COM CORPORATION',
+   0x027001: u'RACAL-DATACOM',
+   0x0270B0: u'M/A-COM INC. COMPANIES',
+   0x0270B3: u'DATA RECALL LTD',
+   0x029D8E: u'CARDIAC RECORDERS INC.',
+   0x02AA3C: u'OLIVETTI TELECOMM SPA (OLTECO)',
+   0x02BB01: u'OCTOTHORPE CORP.',
+   0x02C08C: u'3COM CORPORATION',
+   0x02CF1C: u'COMMUNICATION MACHINERY CORP.',
+   0x02E6D3: u'NIXDORF COMPUTER CORPORATION',
+   0x040AE0: u'XMIT AG COMPUTER NETWORKS',
+   0x04E0C4: u'TRIUMPH-ADLER AG',
+   0x080001: u'COMPUTERVISION CORPORATION',
+   0x080002: u'BRIDGE COMMUNICATIONS INC.',
+   0x080003: u'ADVANCED COMPUTER COMM.',
+   0x080004: u'CROMEMCO INCORPORATED',
+   0x080005: u'SYMBOLICS INC.',
+   0x080006: u'SIEMENS AG',
+   0x080007: u'APPLE COMPUTER INC.',
+   0x080008: u'BOLT BERANEK AND NEWMAN INC.',
+   0x080009: u'HEWLETT PACKARD',
+   0x08000A: u'NESTAR SYSTEMS INCORPORATED',
+   0x08000B: u'UNISYS CORPORATION',
+   0x08000C: u'MIKLYN DEVELOPMENT CO.',
+   0x08000D: u'INTERNATIONAL COMPUTERS LTD.',
+   0x08000E: u'NCR CORPORATION',
+   0x08000F: u'MITEL CORPORATION',
+   0x080011: u'TEKTRONIX INC.',
+   0x080012: u'BELL ATLANTIC INTEGRATED SYST.',
+   0x080013: u'EXXON',
+   0x080014: u'EXCELAN',
+   0x080015: u'STC BUSINESS SYSTEMS',
+   0x080016: u'BARRISTER INFO SYS CORP',
+   0x080017: u'NATIONAL SEMICONDUCTOR',
+   0x080018: u'PIRELLI FOCOM NETWORKS',
+   0x080019: u'GENERAL ELECTRIC CORPORATION',
+   0x08001A: u'TIARA/ 10NET',
+   0x08001B: u'DATA GENERAL',
+   0x08001C: u'KDD-KOKUSAI DEBNSIN DENWA CO.',
+   0x08001D: u'ABLE COMMUNICATIONS INC.',
+   0x08001E: u'APOLLO COMPUTER INC.',
+   0x08001F: u'SHARP CORPORATION',
+   0x080020: u'SUN MICROSYSTEMS INC.',
+   0x080021: u'3M COMPANY',
+   0x080022: u'NBI INC.',
+   0x080023: u'Panasonic Communications Co., Ltd.',
+   0x080024: u'10NET COMMUNICATIONS/DCA',
+   0x080025: u'CONTROL DATA',
+   0x080026: u'NORSK DATA A.S.',
+   0x080027: u'CADMUS COMPUTER SYSTEMS',
+   0x080028: u'Texas Instruments',
+   0x080029: u'MEGATEK CORPORATION',
+   0x08002A: u'MOSAIC TECHNOLOGIES INC.',
+   0x08002B: u'DIGITAL EQUIPMENT CORPORATION',
+   0x08002C: u'BRITTON LEE INC.',
+   0x08002D: u'LAN-TEC INC.',
+   0x08002E: u'METAPHOR COMPUTER SYSTEMS',
+   0x08002F: u'PRIME COMPUTER INC.',
+   0x080030: u'NETWORK RESEARCH CORPORATION',
+   0x080030: u'CERN',
+   0x080030: u'ROYAL MELBOURNE INST OF TECH',
+   0x080031: u'LITTLE MACHINES INC.',
+   0x080032: u'TIGAN INCORPORATED',
+   0x080033: u'BAUSCH & LOMB',
+   0x080034: u'FILENET CORPORATION',
+   0x080035: u'MICROFIVE CORPORATION',
+   0x080036: u'INTERGRAPH CORPORATION',
+   0x080037: u'FUJI-XEROX CO. LTD.',
+   0x080038: u'CII HONEYWELL BULL',
+   0x080039: u'SPIDER SYSTEMS LIMITED',
+   0x08003A: u'ORCATECH INC.',
+   0x08003B: u'TORUS SYSTEMS LIMITED',
+   0x08003C: u'SCHLUMBERGER WELL SERVICES',
+   0x08003D: u'CADNETIX CORPORATIONS',
+   0x08003E: u'CODEX CORPORATION',
+   0x08003F: u'FRED KOSCHARA ENTERPRISES',
+   0x080040: u'FERRANTI COMPUTER SYS. LIMITED',
+   0x080041: u'RACAL-MILGO INFORMATION SYS..',
+   0x080042: u'JAPAN MACNICS CORP.',
+   0x080043: u'PIXEL COMPUTER INC.',
+   0x080044: u'DAVID SYSTEMS INC.',
+   0x080045: u'CONCURRENT COMPUTER CORP.',
+   0x080046: u'SONY CORPORATION LTD.',
+   0x080047: u'SEQUENT COMPUTER SYSTEMS INC.',
+   0x080048: u'EUROTHERM GAUGING SYSTEMS',
+   0x080049: u'UNIVATION',
+   0x08004A: u'BANYAN SYSTEMS INC.',
+   0x08004B: u'PLANNING RESEARCH CORP.',
+   0x08004C: u'HYDRA COMPUTER SYSTEMS INC.',
+   0x08004D: u'CORVUS SYSTEMS INC.',
+   0x08004E: u'3COM EUROPE LTD.',
+   0x08004F: u'CYGNET SYSTEMS',
+   0x080050: u'DAISY SYSTEMS CORP.',
+   0x080051: u'EXPERDATA',
+   0x080052: u'INSYSTEC',
+   0x080053: u'MIDDLE EAST TECH. UNIVERSITY',
+   0x080055: u'STANFORD TELECOMM. INC.',
+   0x080056: u'STANFORD LINEAR ACCEL. CENTER',
+   0x080057: u'EVANS & SUTHERLAND',
+   0x080058: u'SYSTEMS CONCEPTS',
+   0x080059: u'A/S MYCRON',
+   0x08005A: u'IBM CORPORATION',
+   0x08005B: u'VTA TECHNOLOGIES INC.',
+   0x08005C: u'FOUR PHASE SYSTEMS',
+   0x08005D: u'GOULD INC.',
+   0x08005E: u'COUNTERPOINT COMPUTER INC.',
+   0x08005F: u'SABER TECHNOLOGY CORP.',
+   0x080060: u'INDUSTRIAL NETWORKING INC.',
+   0x080061: u'JAROGATE LTD.',
+   0x080062: u'GENERAL DYNAMICS',
+   0x080063: u'PLESSEY',
+   0x080064: u'AUTOPHON AG',
+   0x080065: u'GENRAD INC.',
+   0x080066: u'AGFA CORPORATION',
+   0x080067: u'COMDESIGN',
+   0x080068: u'RIDGE COMPUTERS',
+   0x080069: u'SILICON GRAPHICS INC.',
+   0x08006A: u'ATT BELL LABORATORIES',
+   0x08006B: u'ACCEL TECHNOLOGIES INC.',
+   0x08006C: u'SUNTEK TECHNOLOGY INT\'L',
+   0x08006D: u'WHITECHAPEL COMPUTER WORKS',
+   0x08006E: u'MASSCOMP',
+   0x08006F: u'PHILIPS APELDOORN B.V.',
+   0x080070: u'MITSUBISHI ELECTRIC CORP.',
+   0x080071: u'MATRA (DSIE)',
+   0x080072: u'XEROX CORP UNIV GRANT PROGRAM',
+   0x080073: u'TECMAR INC.',
+   0x080074: u'CASIO COMPUTER CO. LTD.',
+   0x080075: u'DANSK DATA ELECTRONIK',
+   0x080076: u'PC LAN TECHNOLOGIES',
+   0x080077: u'TSL COMMUNICATIONS LTD.',
+   0x080078: u'ACCELL CORPORATION',
+   0x080079: u'THE DROID WORKS',
+   0x08007A: u'INDATA',
+   0x08007B: u'SANYO ELECTRIC CO. LTD.',
+   0x08007C: u'VITALINK COMMUNICATIONS CORP.',
+   0x08007E: u'AMALGAMATED WIRELESS(AUS) LTD',
+   0x08007F: u'CARNEGIE-MELLON UNIVERSITY',
+   0x080080: u'AES DATA INC.',
+   0x080081: u'ASTECH INC.',
+   0x080082: u'VERITAS SOFTWARE',
+   0x080083: u'Seiko Instruments Inc.',
+   0x080084: u'TOMEN ELECTRONICS CORP.',
+   0x080085: u'ELXSI',
+   0x080086: u'KONICA MINOLTA HOLDINGS, INC.',
+   0x080087: u'XYPLEX',
+   0x080088: u'MCDATA CORPORATION',
+   0x080089: u'KINETICS',
+   0x08008A: u'PERFORMANCE TECHNOLOGY',
+   0x08008B: u'PYRAMID TECHNOLOGY CORP.',
+   0x08008C: u'NETWORK RESEARCH CORPORATION',
+   0x08008D: u'XYVISION INC.',
+   0x08008E: u'TANDEM COMPUTERS',
+   0x08008F: u'CHIPCOM CORPORATION',
+   0x080090: u'SONOMA SYSTEMS',
+   0x081443: u'UNIBRAIN S.A.',
+   0x08BBCC: u'AK-NORD EDV VERTRIEBSGES. mbH',
+   0x100000: u'PRIVATE',
+   0x10005A: u'IBM CORPORATION',
+   0x1000E8: u'NATIONAL SEMICONDUCTOR',
+   0x1100AA: u'PRIVATE',
+   0x800010: u'ATT BELL LABORATORIES',
+   0xA06A00: u'Verilink Corporation',
+   0xAA0000: u'DIGITAL EQUIPMENT CORPORATION',
+   0xAA0001: u'DIGITAL EQUIPMENT CORPORATION',
+   0xAA0002: u'DIGITAL EQUIPMENT CORPORATION',
+   0xAA0003: u'DIGITAL EQUIPMENT CORPORATION',
+   0xAA0004: u'DIGITAL EQUIPMENT CORPORATION',
+   0xACDE48: u'PRIVATE',
+}
+
diff --git a/lib/hachoir_parser/network/tcpdump.py b/lib/hachoir_parser/network/tcpdump.py
new file mode 100644
index 0000000000000000000000000000000000000000..564e61892ca958fdc59b4972a5774138e6e2a74d
--- /dev/null
+++ b/lib/hachoir_parser/network/tcpdump.py
@@ -0,0 +1,505 @@
+"""
+Tcpdump parser
+
+Source:
+ * libpcap source code (file savefile.c)
+ * RFC 791 (IPv4)
+ * RFC 792 (ICMP)
+ * RFC 793 (TCP)
+ * RFC 1122 (Requirements for Internet Hosts)
+
+Author: Victor Stinner
+Creation: 23 march 2006
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, ParserError,
+    Enum, Bytes, NullBytes, RawBytes,
+    UInt8, UInt16, UInt32, Int32, TimestampUnix32,
+    Bit, Bits, NullBits)
+from hachoir_core.endian import NETWORK_ENDIAN, LITTLE_ENDIAN
+from hachoir_core.tools import humanDuration
+from hachoir_core.text_handler import textHandler, hexadecimal
+from hachoir_core.tools import createDict
+from hachoir_parser.network.common import MAC48_Address, IPv4_Address, IPv6_Address
+
+def diff(field):
+    return humanDuration(field.value*1000)
+
+class Layer(FieldSet):
+    endian = NETWORK_ENDIAN
+    def parseNext(self, parent):
+        return None
+
+class ARP(Layer):
+    opcode_name = {
+        1: "request",
+        2: "reply"
+    }
+    endian = NETWORK_ENDIAN
+
+    def createFields(self):
+        yield UInt16(self, "hw_type")
+        yield UInt16(self, "proto_type")
+        yield UInt8(self, "hw_size")
+        yield UInt8(self, "proto_size")
+        yield Enum(UInt16(self, "opcode"), ARP.opcode_name)
+        yield MAC48_Address(self, "src_mac")
+        yield IPv4_Address(self, "src_ip")
+        yield MAC48_Address(self, "dst_mac")
+        yield IPv4_Address(self, "dst_ip")
+
+    def createDescription(self):
+        desc = "ARP: %s" % self["opcode"].display
+        opcode = self["opcode"].value
+        src_ip = self["src_ip"].display
+        dst_ip = self["dst_ip"].display
+        if opcode == 1:
+            desc += ", %s ask %s" % (dst_ip, src_ip)
+        elif opcode == 2:
+            desc += " from %s" % src_ip
+        return desc
+
+class TCP_Option(FieldSet):
+    NOP = 1
+    MAX_SEGMENT = 2
+    WINDOW_SCALE = 3
+    SACK = 4
+    TIMESTAMP = 8
+
+    code_name = {
+        NOP: "NOP",
+        MAX_SEGMENT: "Max segment size",
+        WINDOW_SCALE: "Window scale",
+        SACK: "SACK permitted",
+        TIMESTAMP: "Timestamp"
+    }
+
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+        if self["code"].value != self.NOP:
+            self._size = self["length"].value * 8
+        else:
+            self._size = 8
+
+    def createFields(self):
+        yield Enum(UInt8(self, "code", "Code"), self.code_name)
+        code = self["code"].value
+        if code == self.NOP:
+            return
+        yield UInt8(self, "length", "Option size in bytes")
+        if code == self.MAX_SEGMENT:
+            yield UInt16(self, "max_seg", "Maximum segment size")
+        elif code == self.WINDOW_SCALE:
+            yield UInt8(self, "win_scale", "Window scale")
+        elif code == self.TIMESTAMP:
+            yield UInt32(self, "ts_val", "Timestamp value")
+            yield UInt32(self, "ts_ecr", "Timestamp echo reply")
+        else:
+            size = (self.size - self.current_size) // 8
+            if size:
+                yield RawBytes(self, "data", size)
+
+    def createDescription(self):
+        return "TCP option: %s" % self["code"].display
+
+class TCP(Layer):
+    port_name = {
+        13: "daytime",
+        20: "ftp data",
+        21: "ftp",
+        23: "telnet",
+        25: "smtp",
+        53: "dns",
+        63: "dhcp/bootp",
+        80: "HTTP",
+        110: "pop3",
+        119: "nntp",
+        123: "ntp",
+        139: "netbios session service",
+        1863: "MSNMS",
+        6667: "IRC"
+    }
+
+    def createFields(self):
+        yield Enum(UInt16(self, "src"), self.port_name)
+        yield Enum(UInt16(self, "dst"), self.port_name)
+        yield UInt32(self, "seq_num")
+        yield UInt32(self, "ack_num")
+
+        yield Bits(self, "hdrlen", 6, "Header lenght")
+        yield NullBits(self, "reserved", 2, "Reserved")
+
+        yield Bit(self, "cgst", "Congestion Window Reduced")
+        yield Bit(self, "ecn-echo", "ECN-echo")
+        yield Bit(self, "urg", "Urgent")
+        yield Bit(self, "ack", "Acknowledge")
+        yield Bit(self, "psh", "Push mmode")
+        yield Bit(self, "rst", "Reset connection")
+        yield Bit(self, "syn", "Synchronize")
+        yield Bit(self, "fin", "Stop the connection")
+
+        yield UInt16(self, "winsize", "Windows size")
+        yield textHandler(UInt16(self, "checksum"), hexadecimal)
+        yield UInt16(self, "urgent")
+
+        size = self["hdrlen"].value*8 - self.current_size
+        while 0 < size:
+            option = TCP_Option(self, "option[]")
+            yield option
+            size -= option.size
+
+    def parseNext(self, parent):
+        return None
+
+    def createDescription(self):
+        src = self["src"].value
+        dst = self["dst"].value
+        if src < 32768:
+            src = self["src"].display
+        else:
+            src = None
+        if dst < 32768:
+            dst = self["dst"].display
+        else:
+            dst = None
+        desc = "TCP"
+        if src != None and dst != None:
+            desc += " (%s->%s)" % (src, dst)
+        elif src != None:
+            desc += " (%s->)" % (src)
+        elif dst != None:
+            desc += " (->%s)" % (dst)
+
+        # Get flags
+        flags = []
+        if self["syn"].value:
+            flags.append("SYN")
+        if self["ack"].value:
+            flags.append("ACK")
+        if self["fin"].value:
+            flags.append("FIN")
+        if self["rst"].value:
+            flags.append("RST")
+        if flags:
+            desc += " [%s]" % (",".join(flags))
+        return desc
+
+class UDP(Layer):
+    port_name = {
+        12: "daytime",
+        22: "ssh",
+        53: "DNS",
+        67: "dhcp/bootp",
+        80: "http",
+        110: "pop3",
+        123: "ntp",
+        137: "netbios name service",
+        138: "netbios datagram service"
+    }
+
+    def createFields(self):
+        yield Enum(UInt16(self, "src"), UDP.port_name)
+        yield Enum(UInt16(self, "dst"), UDP.port_name)
+        yield UInt16(self, "length")
+        yield textHandler(UInt16(self, "checksum"), hexadecimal)
+
+    def createDescription(self):
+        return "UDP (%s->%s)" % (self["src"].display, self["dst"].display)
+
+class ICMP(Layer):
+    REJECT = 3
+    PONG = 0
+    PING = 8
+    type_desc = {
+        PONG: "Pong",
+        REJECT: "Reject",
+        PING: "Ping"
+    }
+    reject_reason = {
+        0: "net unreachable",
+        1: "host unreachable",
+        2: "protocol unreachable",
+        3: "port unreachable",
+        4: "fragmentation needed and DF set",
+        5: "source route failed",
+        6: "Destination network unknown error",
+        7: "Destination host unknown error",
+        8: "Source host isolated error",
+        9: "Destination network administratively prohibited",
+        10: "Destination host administratively prohibited",
+        11: "Unreachable network for Type Of Service",
+        12: "Unreachable host for Type Of Service.",
+        13: "Communication administratively prohibited",
+        14: "Host precedence violation",
+        15: "Precedence cutoff in effect"
+    }
+
+    def createFields(self):
+        # Type
+        yield Enum(UInt8(self, "type"), self.type_desc)
+        type = self["type"].value
+
+        # Code
+        field = UInt8(self, "code")
+        if type == 3:
+            field = Enum(field, self.reject_reason)
+        yield field
+
+        # Options
+        yield textHandler(UInt16(self, "checksum"), hexadecimal)
+        if type in (self.PING, self.PONG): # and self["code"].value == 0:
+            yield UInt16(self, "id")
+            yield UInt16(self, "seq_num")
+            # follow: ping data
+        elif type == self.REJECT:
+            yield NullBytes(self, "empty", 2)
+            yield UInt16(self, "hop_mtu", "Next-Hop MTU")
+
+    def createDescription(self):
+        type = self["type"].value
+        if type in (self.PING, self.PONG):
+            return "%s (num=%s)" % (self["type"].display, self["seq_num"].value)
+        else:
+            return "ICMP (%s)" % self["type"].display
+
+    def parseNext(self, parent):
+        if self["type"].value == self.REJECT:
+            return IPv4(parent, "rejected_ipv4")
+        else:
+            return None
+
+class ICMPv6(Layer):
+    ECHO_REQUEST = 128
+    ECHO_REPLY = 129
+    TYPE_DESC = {
+        128: "Echo request",
+        129: "Echo reply",
+    }
+
+    def createFields(self):
+        yield Enum(UInt8(self, "type"), self.TYPE_DESC)
+        yield UInt8(self, "code")
+        yield textHandler(UInt16(self, "checksum"), hexadecimal)
+
+        if self['type'].value in (self.ECHO_REQUEST, self.ECHO_REPLY):
+            yield UInt16(self, "id")
+            yield UInt16(self, "sequence")
+
+    def createDescription(self):
+        if self['type'].value in (self.ECHO_REQUEST, self.ECHO_REPLY):
+            return "%s (num=%s)" % (self["type"].display, self["sequence"].value)
+        else:
+            return "ICMPv6 (%s)" % self["type"].display
+
+class IP(Layer):
+    PROTOCOL_INFO = {
+         1: ("icmp", ICMP, "ICMP"),
+        6: ("tcp",  TCP, "TCP"),
+        17: ("udp",  UDP, "UDP"),
+        58: ("icmpv6",  ICMPv6, "ICMPv6"),
+        60: ("ipv6_opts", None, "IPv6 destination option"),
+    }
+    PROTOCOL_NAME = createDict(PROTOCOL_INFO, 2)
+
+    def parseNext(self, parent):
+        proto = self["protocol"].value
+        if proto not in self.PROTOCOL_INFO:
+            return None
+        name, parser, desc = self.PROTOCOL_INFO[proto]
+        if not parser:
+            return None
+        return parser(parent, name)
+
+class IPv4(IP):
+    precedence_name = {
+        7: "Network Control",
+        6: "Internetwork Control",
+        5: "CRITIC/ECP",
+        4: "Flash Override",
+        3: "Flash",
+        2: "Immediate",
+        1: "Priority",
+        0: "Routine",
+    }
+
+    def __init__(self, *args):
+        FieldSet.__init__(self, *args)
+        self._size = self["hdr_size"].value * 32
+
+    def createFields(self):
+        yield Bits(self, "version", 4, "Version")
+        yield Bits(self, "hdr_size", 4, "Header size divided by 5")
+
+        # Type of service
+        yield Enum(Bits(self, "precedence", 3, "Precedence"), self.precedence_name)
+        yield Bit(self, "low_delay", "If set, low delay, else normal delay")
+        yield Bit(self, "high_throu", "If set, high throughput, else normal throughput")
+        yield Bit(self, "high_rel", "If set, high relibility, else normal")
+        yield NullBits(self, "reserved[]", 2, "(reserved for future use)")
+
+        yield UInt16(self, "length")
+        yield UInt16(self, "id")
+
+        yield NullBits(self, "reserved[]", 1)
+        yield Bit(self, "df", "Don't fragment")
+        yield Bit(self, "more_frag", "There are more fragments? if not set, it's the last one")
+        yield Bits(self, "frag_ofst_lo", 5)
+        yield UInt8(self, "frag_ofst_hi")
+        yield UInt8(self, "ttl", "Type to live")
+        yield Enum(UInt8(self, "protocol"), self.PROTOCOL_NAME)
+        yield textHandler(UInt16(self, "checksum"), hexadecimal)
+        yield IPv4_Address(self, "src")
+        yield IPv4_Address(self, "dst")
+
+        size = (self.size - self.current_size) // 8
+        if size:
+            yield RawBytes(self, "options", size)
+
+    def createDescription(self):
+        return "IPv4 (%s>%s)" % (self["src"].display, self["dst"].display)
+
+class IPv6(IP):
+    static_size = 40 * 8
+    endian = NETWORK_ENDIAN
+
+    def createFields(self):
+        yield Bits(self, "version", 4, "Version (6)")
+        yield Bits(self, "traffic", 8, "Traffic class")
+        yield Bits(self, "flow", 20, "Flow label")
+        yield Bits(self, "length", 16, "Payload length")
+        yield Enum(Bits(self, "protocol", 8, "Next header"), self.PROTOCOL_NAME)
+        yield Bits(self, "hop_limit", 8, "Hop limit")
+        yield IPv6_Address(self, "src")
+        yield IPv6_Address(self, "dst")
+
+    def createDescription(self):
+        return "IPv6 (%s>%s)" % (self["src"].display, self["dst"].display)
+
+class Layer2(Layer):
+    PROTO_INFO = {
+        0x0800: ("ipv4", IPv4, "IPv4"),
+        0x0806: ("arp",  ARP,  "ARP"),
+        0x86dd: ("ipv6", IPv6, "IPv6"),
+    }
+    PROTO_DESC = createDict(PROTO_INFO, 2)
+
+    def parseNext(self, parent):
+        try:
+            name, parser, desc = self.PROTO_INFO[ self["protocol"].value ]
+            return parser(parent, name)
+        except KeyError:
+            return None
+
+class Unicast(Layer2):
+    # "Linux cooked capture" style link layer (pcap link type 113).
+    packet_type_name = {
+        0: "Unicast to us"
+    }
+    def createFields(self):
+        yield Enum(UInt16(self, "packet_type"), self.packet_type_name)
+        yield UInt16(self, "addr_type", "Link-layer address type")
+        yield UInt16(self, "addr_length", "Link-layer address length")
+        length = self["addr_length"].value
+        # Deliberate override: the stored addr_length is ignored and the
+        # address field is read as a fixed 8 bytes.
+        length = 8   # FIXME: Should we use addr_length or not?
+        if length:
+            yield RawBytes(self, "source", length)
+        yield Enum(UInt16(self, "protocol"), self.PROTO_DESC)
+
+class Ethernet(Layer2):
+    static_size = 14*8
+    def createFields(self):
+        yield MAC48_Address(self, "dst")
+        yield MAC48_Address(self, "src")
+        yield Enum(UInt16(self, "protocol"), self.PROTO_DESC)
+
+    def createDescription(self):
+        return "Ethernet: %s>%s (%s)" % \
+            (self["src"].display, self["dst"].display, self["protocol"].display)
+
+class Packet(FieldSet):
+    """One captured packet: a 16-byte pcap record header followed by
+    "caplen" bytes of captured data, parsed as a chain of layers."""
+    endian = LITTLE_ENDIAN
+
+    def __init__(self, parent, name, parser, first_name):
+        FieldSet.__init__(self, parent, name)
+        # Record header (16 bytes) plus the captured payload.
+        self._size = (16 + self["caplen"].value) * 8
+        # Parser class and field name for the first (link-layer) field.
+        self._first_parser = parser
+        self._first_name = first_name
+
+    def createFields(self):
+        yield TimestampUnix32(self, "ts_epoch", "Timestamp (Epoch)")
+        yield UInt32(self, "ts_nanosec", "Timestamp (nano second)")
+        yield UInt32(self, "caplen", "length of portion present")
+        yield UInt32(self, "len", "length this packet (off wire)")
+
+        # Read different layers: each layer's parseNext() chooses the
+        # parser (if any) for the bytes that follow it.
+        field = self._first_parser(self, self._first_name)
+        while field:
+            yield field
+            field = field.parseNext(self)
+
+        # Read data if any
+        size = (self.size - self.current_size) // 8
+        if size:
+            yield RawBytes(self, "data", size)
+
+    def getTimestamp(self):
+        # NOTE(review): "ts_nanosec" divided by 100 is then passed as
+        # microseconds; classic pcap stores microseconds in this field,
+        # so the /100 scale looks suspicious -- TODO confirm against
+        # sample captures before changing.
+        nano_sec = float(self["ts_nanosec"].value) / 100
+        from datetime import timedelta
+        return self["ts_epoch"].value + timedelta(microseconds=nano_sec)
+
+    def createDescription(self):
+        # Display the capture time relative to the first packet.
+        t0 = self["/packet[0]"].getTimestamp()
+#        ts = max(self.getTimestamp() - t0, t0)
+        ts = self.getTimestamp() - t0
+        #text = ["%1.6f: " % ts]
+        text = ["%s: " % ts]
+        # Summarize using the innermost recognized layer.
+        if "icmp" in self:
+            text.append(self["icmp"].description)
+        elif "tcp" in self:
+            text.append(self["tcp"].description)
+        elif "udp" in self:
+            text.append(self["udp"].description)
+        elif "arp" in self:
+            text.append(self["arp"].description)
+        else:
+            text.append("Packet")
+        return "".join(text)
+
+class TcpdumpFile(Parser):
+    PARSER_TAGS = {
+        "id": "tcpdump",
+        "category": "misc",
+        "min_size": 24*8,
+        "description": "Tcpdump file (network)",
+        "magic": (("\xd4\xc3\xb2\xa1", 0),),
+    }
+    endian = LITTLE_ENDIAN
+
+    LINK_TYPE = {
+          1: ("ethernet", Ethernet),
+        113: ("unicast", Unicast),
+    }
+    LINK_TYPE_DESC = createDict(LINK_TYPE, 0)
+
+    def validate(self):
+        if self["id"].value != "\xd4\xc3\xb2\xa1":
+            return "Wrong file signature"
+        if self["link_type"].value not in self.LINK_TYPE:
+            return "Unknown link type"
+        return True
+
+    def createFields(self):
+        yield Bytes(self, "id", 4, "Tcpdump identifier")
+        yield UInt16(self, "maj_ver", "Major version")
+        yield UInt16(self, "min_ver", "Minor version")
+        yield Int32(self, "this_zone", "GMT to local time zone correction")
+        yield Int32(self, "sigfigs", "accuracy of timestamps")
+        yield UInt32(self, "snap_len", "max length saved portion of each pkt")
+        yield Enum(UInt32(self, "link_type", "data link type"), self.LINK_TYPE_DESC)
+        link = self["link_type"].value
+        if link not in self.LINK_TYPE:
+            raise ParserError("Unknown link type: %s" % link)
+        name, parser = self.LINK_TYPE[link]
+        while self.current_size < self.size:
+            yield Packet(self, "packet[]", parser, name)
+
diff --git a/lib/hachoir_parser/parser.py b/lib/hachoir_parser/parser.py
new file mode 100644
index 0000000000000000000000000000000000000000..e81241588be10a954d16ad6e10f818c2215ce3fc
--- /dev/null
+++ b/lib/hachoir_parser/parser.py
@@ -0,0 +1,154 @@
+import hachoir_core.config as config
+from hachoir_core.field import Parser as GenericParser
+from hachoir_core.error import HACHOIR_ERRORS, HachoirError, error
+from hachoir_core.tools import makeUnicode
+from hachoir_core.i18n import _
+from inspect import getmro
+
+
+class ValidateError(HachoirError):
+    pass
+
+class HachoirParser(object):
+    """
+    A parser is the root of all other fields. It create first level of fields
+    and have special attributes and methods:
+    - tags: dictionnary with keys:
+      - "file_ext": classical file extensions (string or tuple of strings) ;
+      - "mime": MIME type(s) (string or tuple of strings) ;
+      - "description": String describing the parser.
+    - endian: Byte order (L{BIG_ENDIAN} or L{LITTLE_ENDIAN}) of input data ;
+    - stream: Data input stream (set in L{__init__()}).
+
+    Default values:
+    - size: Field set size will be size of input stream ;
+    - mime_type: First MIME type of tags["mime"] (if it does exist,
+      None otherwise).
+    """
+
+    _autofix = False
+
+    def __init__(self, stream, **args):
+        validate = args.pop("validate", False)
+        self._mime_type = None
+        while validate:
+            nbits = self.getParserTags()["min_size"]
+            if stream.sizeGe(nbits):
+                res = self.validate()
+                if res is True:
+                    break
+                res = makeUnicode(res)
+            else:
+                res = _("stream is smaller than %s.%s bytes" % divmod(nbits, 8))
+            raise ValidateError(res or _("no reason given"))
+        self._autofix = True
+
+    #--- Methods that can be overridden -------------------------------------
+    def createDescription(self):
+        """
+        Create an Unicode description
+        """
+        return self.PARSER_TAGS["description"]
+
+    def createMimeType(self):
+        """
+        Create MIME type (string), eg. "image/png"
+
+        If it returns None, "application/octet-stream" is used.
+        """
+        if "mime" in self.PARSER_TAGS:
+            return self.PARSER_TAGS["mime"][0]
+        return None
+
+    def validate(self):
+        """
+        Check that the parser is able to parse the stream. Valid results:
+        - True: stream looks valid ;
+        - False: stream is invalid ;
+        - str: string describing the error.
+        """
+        raise NotImplementedError()
+
+    #--- Getter methods -----------------------------------------------------
+    def _getDescription(self):
+        if self._description is None:
+            try:
+                self._description = self.createDescription()
+                if isinstance(self._description, str):
+                    self._description = makeUnicode(self._description)
+            except HACHOIR_ERRORS, err:
+                error("Error getting description of %s: %s" \
+                    % (self.path, unicode(err)))
+                self._description = self.PARSER_TAGS["description"]
+        return self._description
+    description = property(_getDescription,
+    doc="Description of the parser")
+
+    def _getMimeType(self):
+        if not self._mime_type:
+            try:
+                self._mime_type = self.createMimeType()
+            except HACHOIR_ERRORS, err:
+                self.error("Error when creating MIME type: %s" % unicode(err))
+            if not self._mime_type \
+            and self.createMimeType != Parser.createMimeType:
+                self._mime_type = Parser.createMimeType(self)
+            if not self._mime_type:
+                self._mime_type = u"application/octet-stream"
+        return self._mime_type
+    mime_type = property(_getMimeType)
+
+    def createContentSize(self):
+        return None
+    def _getContentSize(self):
+        if not hasattr(self, "_content_size"):
+            try:
+                self._content_size = self.createContentSize()
+            except HACHOIR_ERRORS, err:
+                error("Unable to compute %s content size: %s" % (self.__class__.__name__, err))
+                self._content_size = None
+        return self._content_size
+    content_size = property(_getContentSize)
+
+    def createFilenameSuffix(self):
+        """
+        Create filename suffix: "." + first value of self.PARSER_TAGS["file_ext"],
+        or None if self.PARSER_TAGS["file_ext"] doesn't exist.
+        """
+        file_ext = self.getParserTags().get("file_ext")
+        if isinstance(file_ext, (tuple, list)):
+            file_ext = file_ext[0]
+        return file_ext and '.' + file_ext
+    def _getFilenameSuffix(self):
+        if not hasattr(self, "_filename_suffix"):
+            self._filename_extension = self.createFilenameSuffix()
+        return self._filename_extension
+    filename_suffix = property(_getFilenameSuffix)
+
+    @classmethod
+    def getParserTags(cls):
+        tags = {}
+        for cls in reversed(getmro(cls)):
+            if hasattr(cls, "PARSER_TAGS"):
+                tags.update(cls.PARSER_TAGS)
+        return tags
+
+    @classmethod
+    def print_(cls, out, verbose):
+        tags = cls.getParserTags()
+        print >>out, "- %s: %s" % (tags["id"], tags["description"])
+        if verbose:
+            if "mime" in tags:
+                print >>out, "  MIME type: %s" % (", ".join(tags["mime"]))
+            if "file_ext" in tags:
+                file_ext = ", ".join(
+                    ".%s" % file_ext for file_ext in tags["file_ext"])
+                print >>out, "  File extension: %s" % file_ext
+
+    autofix = property(lambda self: self._autofix and config.autofix)
+
+class Parser(HachoirParser, GenericParser):
+    def __init__(self, stream, **args):
+        GenericParser.__init__(self, stream)
+        HachoirParser.__init__(self, stream, **args)
+
diff --git a/lib/hachoir_parser/parser_list.py b/lib/hachoir_parser/parser_list.py
new file mode 100644
index 0000000000000000000000000000000000000000..38071550b2566893516bc389312b18a1a18685ea
--- /dev/null
+++ b/lib/hachoir_parser/parser_list.py
@@ -0,0 +1,216 @@
+import re
+import types
+from hachoir_core.error import error
+from hachoir_core.i18n import _
+from hachoir_parser import Parser, HachoirParser
+import sys
+
+### Parser list ################################################################
+
+class ParserList(object):
+    VALID_CATEGORY = ("archive", "audio", "container", "file_system",
+        "game", "image", "misc", "program", "video")
+    ID_REGEX = re.compile("^[a-z0-9][a-z0-9_]{2,}$")
+
+    def __init__(self):
+        self.parser_list = []
+        self.bytag = { "id": {}, "category": {} }
+
+    def translate(self, name, value):
+        if name in ("magic",):
+            return True
+        elif name == "min_size":
+            return - value < 0 or "Invalid minimum size (min_size)"
+        elif name == "description":
+            return isinstance(value, (str, unicode)) and bool(value) or "Invalid description"
+        elif name == "category":
+            if value not in self.VALID_CATEGORY:
+                return "Invalid category: %r" % value
+        elif name == "id":
+            if type(value) is not str or not self.ID_REGEX.match(value):
+                return "Invalid identifier: %r" % value
+            parser = self.bytag[name].get(value)
+            if parser:
+                return "Duplicate parser id: %s already used by %s" % \
+                    (value, parser[0].__name__)
+        # TODO: lists should be forbidden
+        if isinstance(value, list):
+            value = tuple(value)
+        elif not isinstance(value, tuple):
+            value = value,
+        return name, value
+
+    def validParser(self, parser, tags):
+        if "id" not in tags:
+            return "No identifier"
+        if "description" not in tags:
+            return "No description"
+        # TODO: Allow simple strings for file_ext/mime ?
+        # (see also HachoirParser.createFilenameSuffix)
+        file_ext = tags.get("file_ext", ())
+        if not isinstance(file_ext, (tuple, list)):
+            return "File extension is not a tuple or list"
+        mimes = tags.get("mime", ())
+        if not isinstance(mimes, tuple):
+            return "MIME type is not a tuple"
+        for mime in mimes:
+            if not isinstance(mime, unicode):
+                return "MIME type %r is not an unicode string" % mime
+
+        return ""
+
+    def add(self, parser):
+        tags = parser.getParserTags()
+        err = self.validParser(parser, tags)
+        if err:
+            error("Skip parser %s: %s" % (parser.__name__, err))
+            return
+
+        _tags = []
+        for tag in tags.iteritems():
+            tag = self.translate(*tag)
+            if isinstance(tag, tuple):
+                _tags.append(tag)
+            elif tag is not True:
+                error("[%s] %s" % (parser.__name__, tag))
+                return
+
+        self.parser_list.append(parser)
+
+        for name, values in _tags:
+            byname = self.bytag.setdefault(name,{})
+            for value in values:
+                byname.setdefault(value,[]).append(parser)
+
+    def __iter__(self):
+        return iter(self.parser_list)
+
+    def print_(self, title=None, out=None, verbose=False, format="one-line"):
+        """Display a list of parser with its title
+         * out: output file
+         * title : title of the list to display
+         * format: "rest", "trac", "file-ext", "mime" or "one_line" (default)
+        """
+        if out is None:
+            out = sys.stdout
+
+        if format in ("file-ext", "mime"):
+            # Create file extension set
+            extensions = set()
+            for parser in self:
+                file_ext = parser.getParserTags().get(format, ())
+                file_ext = list(file_ext)
+                try:
+                    file_ext.remove("")
+                except ValueError:
+                    pass
+                extensions |= set(file_ext)
+
+            # Remove empty extension
+            extensions -= set(('',))
+
+            # Convert to list and sort by ASCII order
+            extensions = list(extensions)
+            extensions.sort()
+
+            # Print list
+            text = ", ".join( str(item) for item in extensions )
+            if format == "file-ext":
+                print >>out, "File extensions: %s." % text
+                print >>out
+                print >>out, "Total: %s file extensions." % len(extensions)
+            else:
+                print >>out, "MIME types: %s." % text
+                print >>out
+                print >>out, "Total: %s MIME types." % len(extensions)
+            return
+
+        if format == "trac":
+            print >>out, "== List of parsers =="
+            print >>out
+            print >>out, "Total: %s parsers" % len(self.parser_list)
+            print >>out
+        elif format == "one_line":
+            if title:
+                print >>out, title
+            else:
+                print >>out, _("List of Hachoir parsers.")
+            print >>out
+
+        # Create parser list sorted by module
+        bycategory = self.bytag["category"]
+        for category in sorted(bycategory.iterkeys()):
+            if format == "one_line":
+                parser_list = [ parser.PARSER_TAGS["id"] for parser in bycategory[category] ]
+                parser_list.sort()
+                print >>out, "- %s: %s" % (category.title(), ", ".join(parser_list))
+            else:
+                if format == "rest":
+                    print >>out, category.replace("_", " ").title()
+                    print >>out, "-" * len(category)
+                    print >>out
+                elif format == "trac":
+                    print >>out, "=== %s ===" % category.replace("_", " ").title()
+                    print >>out
+                else:
+                    print >>out, "[%s]" % category
+                parser_list = sorted(bycategory[category],
+                    key=lambda parser: parser.PARSER_TAGS["id"])
+                if format == "rest":
+                    for parser in parser_list:
+                        tags = parser.getParserTags()
+                        print >>out, "* %s: %s" % (tags["id"], tags["description"])
+                elif format == "trac":
+                    for parser in parser_list:
+                        tags = parser.getParserTags()
+                        desc = tags["description"]
+                        desc = re.sub(r"([A-Z][a-z]+[A-Z][^ ]+)", r"!\1", desc)
+                        print >>out, " * %s: %s" % (tags["id"], desc)
+                else:
+                    for parser in parser_list:
+                        parser.print_(out, verbose)
+                print >>out
+        if format != "trac":
+            print >>out, "Total: %s parsers" % len(self.parser_list)
+
+
+class HachoirParserList(ParserList):
+    _instance = None
+
+    @classmethod
+    def getInstance(cls):
+        if cls._instance is None:
+            cls._instance = cls()
+        return cls._instance
+
+    def __init__(self):
+        ParserList.__init__(self)
+        self._load()
+
+    def _load(self):
+        """
+        Load all parsers from "hachoir.parser" module.
+
+        Return the list of loaded parsers.
+        """
+        # Parser list is already loaded?
+        if self.parser_list:
+            return self.parser_list
+
+        todo = []
+        module = __import__("hachoir_parser")
+        for attrname in dir(module):
+            attr = getattr(module, attrname)
+            if isinstance(attr, types.ModuleType):
+                todo.append(attr)
+
+        for module in todo:
+            for name in dir(module):
+                attr = getattr(module, name)
+                if isinstance(attr, type) \
+                and issubclass(attr, HachoirParser) \
+                and attr not in (Parser, HachoirParser):
+                    self.add(attr)
+        assert 1 <= len(self.parser_list)
+        return self.parser_list
+
diff --git a/lib/hachoir_parser/program/__init__.py b/lib/hachoir_parser/program/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..2e719f02b32505168181c2764a8572938755d339
--- /dev/null
+++ b/lib/hachoir_parser/program/__init__.py
@@ -0,0 +1,6 @@
+from hachoir_parser.program.elf import ElfFile
+from hachoir_parser.program.exe import ExeFile
+from hachoir_parser.program.python import PythonCompiledFile
+from hachoir_parser.program.java import JavaCompiledClassFile
+from hachoir_parser.program.prc import PRCFile
+
diff --git a/lib/hachoir_parser/program/elf.py b/lib/hachoir_parser/program/elf.py
new file mode 100644
index 0000000000000000000000000000000000000000..3d5731e2244d0f245931454c38a6c420c1125ba7
--- /dev/null
+++ b/lib/hachoir_parser/program/elf.py
@@ -0,0 +1,187 @@
+"""
+ELF (Unix/BSD executable file format) parser.
+
+Author: Victor Stinner
+Creation date: 08 may 2006
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, ParserError,
+    UInt8, UInt16, UInt32, Enum,
+    String, Bytes)
+from hachoir_core.text_handler import textHandler, hexadecimal
+from hachoir_core.endian import LITTLE_ENDIAN, BIG_ENDIAN
+
class ElfHeader(FieldSet):
    """Main ELF header (32-bit layout, 52 bytes)."""
    static_size = 52*8
    # Values of the EI_DATA byte of ident[].
    LITTLE_ENDIAN_ID = 1
    BIG_ENDIAN_ID = 2
    MACHINE_NAME = {
        1: u"AT&T WE 32100",
        2: u"SPARC",
        3: u"Intel 80386",
        4: u"Motorola 68000",
        5: u"Motorola 88000",
        7: u"Intel 80860",
        8: u"MIPS RS3000"
    }
    CLASS_NAME = {
        1: u"32 bits",
        2: u"64 bits"
    }
    TYPE_NAME = {
             0: u"No file type",
             1: u"Relocatable file",
             2: u"Executable file",
             3: u"Shared object file",
             4: u"Core file",
        0xFF00: u"Processor-specific (0xFF00)",
        0xFFFF: u"Processor-specific (0xFFFF)"
    }
    ENDIAN_NAME = {
        LITTLE_ENDIAN_ID: "Little endian",
        BIG_ENDIAN_ID: "Big endian",
    }

    def createFields(self):
        yield Bytes(self, "signature", 4, r'ELF signature ("\x7fELF")')
        yield Enum(UInt8(self, "class", "Class"), self.CLASS_NAME)
        yield Enum(UInt8(self, "endian", "Endian"), self.ENDIAN_NAME)
        yield UInt8(self, "file_version", "File version")
        yield String(self, "pad", 8, "Pad")
        yield UInt8(self, "nb_ident", "Size of ident[]")
        yield Enum(UInt16(self, "type", "File type"), self.TYPE_NAME)
        yield Enum(UInt16(self, "machine", "Machine type"), self.MACHINE_NAME)
        yield UInt32(self, "version", "ELF format version")
        # e_entry: virtual address of the program entry point
        # (the old description "Number of entries" was wrong).
        yield UInt32(self, "entry", "Entry point virtual address")
        yield UInt32(self, "phoff", "Program header offset")
        yield UInt32(self, "shoff", "Section header offset")
        yield UInt32(self, "flags", "Flags")
        yield UInt16(self, "ehsize", "Elf header size (this header)")
        yield UInt16(self, "phentsize", "Program header entry size")
        yield UInt16(self, "phnum", "Program header entry count")
        yield UInt16(self, "shentsize", "Section header entry size")
        # Fixed typo: "entre" -> "entry".
        yield UInt16(self, "shnum", "Section header entry count")
        yield UInt16(self, "shstrndx", "Section header strtab index")

    def isValid(self):
        """Return an empty string if the header is valid, else an error message."""
        if self["signature"].value != "\x7FELF":
            return "Wrong ELF signature"
        if self["class"].value not in self.CLASS_NAME:
            return "Unknown class"
        if self["endian"].value not in self.ENDIAN_NAME:
            return "Unknown endian (%s)" % self["endian"].value
        return ""
+
class SectionHeader32(FieldSet):
    """ELF 32-bit section header table entry (40 bytes)."""
    static_size = 40*8
    TYPE_NAME = {
        8: "BSS"
    }

    def createFields(self):
        yield UInt32(self, "name", "Name")
        yield Enum(UInt32(self, "type", "Type"), self.TYPE_NAME)
        yield UInt32(self, "flags", "Flags")
        yield textHandler(UInt32(self, "VMA", "Virtual memory address"), hexadecimal)
        yield textHandler(UInt32(self, "LMA", "Logical memory address (in file)"), hexadecimal)
        yield textHandler(UInt32(self, "size", "Size"), hexadecimal)
        yield UInt32(self, "link", "Link")
        yield UInt32(self, "info", "Information")
        yield UInt32(self, "addr_align", "Address alignment")
        yield UInt32(self, "entry_size", "Entry size")

    def createDescription(self):
        # NOTE(review): "name" is the raw strtab index, not the section
        # name string — the display shows the numeric value.
        return "Section header (name: %s, type: %s)" % \
            (self["name"].value, self["type"].display)
+
class ProgramHeader32(FieldSet):
    """ELF 32-bit program header table entry."""
    TYPE_NAME = {
        3: "Dynamic library"
    }
    static_size = 32*8

    def createFields(self):
        # Use self.TYPE_NAME for consistency with the sibling header
        # classes (lookup result is identical).
        yield Enum(UInt16(self, "type", "Type"), self.TYPE_NAME)
        yield UInt16(self, "flags", "Flags")
        yield UInt32(self, "offset", "Offset")
        yield textHandler(UInt32(self, "vaddr", "V. address"), hexadecimal)
        yield textHandler(UInt32(self, "paddr", "P. address"), hexadecimal)
        yield UInt32(self, "file_size", "File size")
        yield UInt32(self, "mem_size", "Memory size")
        yield UInt32(self, "align", "Alignment")
        yield UInt32(self, "xxx", "???")

    def createDescription(self):
        return "Program Header (%s)" % self["type"].display
+
def sortSection(a, b):
    """
    Comparator ordering two program/section headers by file offset.

    Returns a negative, zero or positive integer (cmp-style).
    Bugfix: compare the fields' .value attributes — the original
    subtracted the field objects themselves, which do not support "-".
    """
    return int(a["offset"].value - b["offset"].value)
+
+#class Sections(FieldSet):
+#    def createFields?(self, stream, parent, sections):
+#        for section in sections:
+#            ofs = section["offset"]
+#            size = section["file_size"]
+#            if size != 0:
+#                sub = stream.createSub(ofs, size)
+#                #yield DeflateFilter(self, "section[]", sub, size, Section,  "Section"))
+#                chunk = self.doRead("section[]", "Section", (Section,), {"stream": sub})
+#            else:
+#                chunk = self.doRead("section[]", "Section", (FormatChunk, "string[0]"))
+#            chunk.description = "ELF section (in file: %s..%s)" % (ofs, ofs+size)
+
class ElfFile(Parser):
    """Parser for ELF programs, shared libraries and core dumps."""
    PARSER_TAGS = {
        "id": "elf",
        "category": "program",
        "file_ext": ("so", ""),
        "min_size": ElfHeader.static_size,  # At least the ELF header
        "mime": (
            u"application/x-executable",
            u"application/x-object",
            u"application/x-sharedlib",
            u"application/x-executable-file",
            u"application/x-coredump"),
        "magic": (("\x7FELF", 0),),
        "description": "ELF Unix/BSD program/library"
    }
    endian = LITTLE_ENDIAN

    def validate(self):
        """Return True if the header is valid, else an error message."""
        err = self["header"].isValid()
        if err:
            return err
        return True

    def createFields(self):
        # Choose the right endian depending on the EI_DATA byte of the
        # header (offset 5) before parsing anything.
        if self.stream.readBits(5*8, 8, BIG_ENDIAN) == ElfHeader.BIG_ENDIAN_ID:
            self.endian = BIG_ENDIAN
        else:
            self.endian = LITTLE_ENDIAN

        # Parse header and program headers
        yield ElfHeader(self, "header", "Header")
        for index in xrange(self["header/phnum"].value):
            yield ProgramHeader32(self, "prg_header[]")

        # TODO: Parse the section contents instead of skipping straight
        # to the section header table. (An unreachable "if False" branch
        # sketching that feature was removed; behavior is unchanged.)
        raw = self.seekByte(self["header/shoff"].value, "raw[]", relative=False)
        if raw:
            yield raw

        for index in xrange(self["header/shnum"].value):
            yield SectionHeader32(self, "section_header[]")

    def createDescription(self):
        return "ELF Unix/BSD program/library: %s" % (
            self["header/class"].display)
+
diff --git a/lib/hachoir_parser/program/exe.py b/lib/hachoir_parser/program/exe.py
new file mode 100644
index 0000000000000000000000000000000000000000..5a7bc72769195c154909fa8e678188d498ca02ae
--- /dev/null
+++ b/lib/hachoir_parser/program/exe.py
@@ -0,0 +1,224 @@
+"""
+Microsoft Windows Portable Executable (PE) file parser.
+
+Informations:
+- Microsoft Portable Executable and Common Object File Format Specification:
+  http://www.microsoft.com/whdc/system/platform/firmware/PECOFF.mspx
+
+Author: Victor Stinner
+Creation date: 2006-08-13
+"""
+
+from hachoir_parser import HachoirParser
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.field import (FieldSet, RootSeekableFieldSet,
+    UInt16, UInt32, String,
+    RawBytes, PaddingBytes)
+from hachoir_core.text_handler import textHandler, hexadecimal
+from hachoir_parser.program.exe_ne import NE_Header
+from hachoir_parser.program.exe_pe import PE_Header, PE_OptHeader, SectionHeader
+from hachoir_parser.program.exe_res import PE_Resource, NE_VersionInfoNode
+
+MAX_NB_SECTION = 50
+
class MSDosHeader(FieldSet):
    """
    Legacy MS-DOS ("MZ") header found at the start of every EXE file.

    When the file is a Windows executable, "next_offset" points to the
    NE or PE header.
    """
    static_size = 64*8

    def createFields(self):
        yield String(self, "header", 2, "File header (MZ)", charset="ASCII")
        yield UInt16(self, "size_mod_512", "File size in bytes modulo 512")
        yield UInt16(self, "size_div_512", "File size in bytes divide by 512")
        yield UInt16(self, "reloc_entries", "Number of relocation entries")
        yield UInt16(self, "code_offset", "Offset to the code in the file (divided by 16)")
        yield UInt16(self, "needed_memory", "Memory needed to run (divided by 16)")
        yield UInt16(self, "max_memory", "Maximum memory needed to run (divided by 16)")
        yield textHandler(UInt32(self, "init_ss_sp", "Initial value of SP:SS registers"), hexadecimal)
        yield UInt16(self, "checksum", "Checksum")
        yield textHandler(UInt32(self, "init_cs_ip", "Initial value of CS:IP registers"), hexadecimal)
        yield UInt16(self, "reloc_offset", "Offset in file to relocation table")
        yield UInt16(self, "overlay_number", "Overlay number")
        yield PaddingBytes(self, "reserved[]", 8, "Reserved")
        yield UInt16(self, "oem_id", "OEM id")
        yield UInt16(self, "oem_info", "OEM info")
        yield PaddingBytes(self, "reserved[]", 20, "Reserved")
        yield UInt32(self, "next_offset", "Offset to next header (PE or NE)")

    def isValid(self):
        """
        Return an empty string if the header is plausible, else a short
        message describing the first failed sanity check.
        """
        if 512 <= self["size_mod_512"].value:
            return "Invalid field 'size_mod_512' value"
        if self["code_offset"].value < 4:
            return "Invalid code offset"
        # A small page count suggests a PE stub rather than a real
        # MS-DOS program; apply the stricter PE-oriented checks then.
        looks_pe = self["size_div_512"].value < 4
        if looks_pe:
            if self["checksum"].value != 0:
                return "Invalid value of checksum"
            if not (80 <= self["next_offset"].value <= 1024):
                return "Invalid value of next_offset"
        return ""
+
class ExeFile(HachoirParser, RootSeekableFieldSet):
    """
    Parser for Microsoft executables: plain MS-DOS programs, New
    Executables (NE, 16-bit Windows) and Portable Executables (PE).

    The variant is detected from the MS-DOS header: "msdos/next_offset"
    locates the NE or PE header when one is present.
    """
    PARSER_TAGS = {
        "id": "exe",
        "category": "program",
        "file_ext": ("exe", "dll", "ocx"),
        "mime": (u"application/x-dosexec",),
        "min_size": 64*8,
        #"magic": (("MZ", 0),),
        "magic_regex": (("MZ.[\0\1].{4}[^\0\1\2\3]", 0),),
        "description": "Microsoft Windows Portable Executable"
    }
    endian = LITTLE_ENDIAN

    def __init__(self, stream, **args):
        # Initialize the seekable field set first: HachoirParser.__init__
        # may already use the field-set machinery (e.g. for validation).
        RootSeekableFieldSet.__init__(self, None, "root", stream, None, stream.askSize(self))
        HachoirParser.__init__(self, stream, **args)

    def validate(self):
        """Return True if the stream looks like an EXE, else an error message."""
        if self.stream.readBytes(0, 2) != 'MZ':
            return "Wrong header"
        err = self["msdos"].isValid()
        if err:
            return "Invalid MSDOS header: "+err
        if self.isPE():
            # Reject absurd section counts to avoid parsing garbage.
            if MAX_NB_SECTION < self["pe_header/nb_section"].value:
                return "Invalid number of section (%s)" \
                    % self["pe_header/nb_section"].value
        return True

    def createFields(self):
        yield MSDosHeader(self, "msdos", "MS-DOS program header")

        # Jump to the NE/PE header when there is one.
        if self.isPE() or self.isNE():
            offset = self["msdos/next_offset"].value
            self.seekByte(offset, relative=False)

        if self.isPE():
            for field in self.parsePortableExecutable():
                yield field
        elif self.isNE():
            for field in self.parseNE_Executable():
                yield field
        else:
            # Plain MS-DOS program: skip to the code.
            offset = self["msdos/code_offset"].value * 16
            self.seekByte(offset, relative=False)

    def parseNE_Executable(self):
        """Yield the fields of a New Executable (stream already at NE header)."""
        yield NE_Header(self, "ne_header")

        # FIXME: Compute resource offset instead of using searchBytes()
        # Ugly hack to get find version info structure
        start = self.current_size
        addr = self.stream.searchBytes('VS_VERSION_INFO', start)
        if addr:
            # Seek 32 bits before the match — presumably the node header
            # precedes the key string; TODO confirm.
            self.seekBit(addr-32)
            yield NE_VersionInfoNode(self, "info")

    def parsePortableExecutable(self):
        """Yield PE header, optional header, section headers and section data."""
        # Read PE header
        yield PE_Header(self, "pe_header")

        # Read PE optional header
        size = self["pe_header/opt_hdr_size"].value
        rsrc_rva = None
        if size:
            yield PE_OptHeader(self, "pe_opt_header", size=size*8)
            if "pe_opt_header/resource/rva" in self:
                rsrc_rva = self["pe_opt_header/resource/rva"].value

        # Read section headers, remembering those with on-disk content
        sections = []
        for index in xrange(self["pe_header/nb_section"].value):
            section = SectionHeader(self, "section_hdr[]")
            yield section
            if section["phys_size"].value:
                sections.append(section)

        # Read sections in physical (file) order
        sections.sort(key=lambda field: field["phys_off"].value)
        for section in sections:
            self.seekByte(section["phys_off"].value)
            size = section["phys_size"].value
            if size:
                name = section.createSectionName()
                # Parse the resource section; keep other sections raw.
                if rsrc_rva is not None and section["rva"].value == rsrc_rva:
                    yield PE_Resource(self, name, section, size=size*8)
                else:
                    yield RawBytes(self, name, size)

    def isPE(self):
        """Check (and cache in _is_pe) whether the file has a PE header."""
        if not hasattr(self, "_is_pe"):
            self._is_pe = False
            offset = self["msdos/next_offset"].value * 8
            if 2*8 <= offset \
            and (offset+PE_Header.static_size) <= self.size \
            and self.stream.readBytes(offset, 4) == 'PE\0\0':
                self._is_pe = True
        return self._is_pe

    def isNE(self):
        """Check (and cache in _is_ne) whether the file has a NE header."""
        if not hasattr(self, "_is_ne"):
            self._is_ne = False
            offset = self["msdos/next_offset"].value * 8
            if 64*8 <= offset \
            and (offset+NE_Header.static_size) <= self.size \
            and self.stream.readBytes(offset, 2) == 'NE':
                self._is_ne = True
        return self._is_ne

    def getResource(self):
        """Return the resource section field, or None when there is none."""
        # MS-DOS program: no resource
        if not self.isPE():
            return None

        # Check if PE has resource or not
        if "pe_opt_header/resource/size" in self:
            if not self["pe_opt_header/resource/size"].value:
                return None
        if "section_rsrc" in self:
            return self["section_rsrc"]
        return None

    def createDescription(self):
        if self.isPE():
            if self["pe_header/is_dll"].value:
                text = u"Microsoft Windows DLL"
            else:
                text = u"Microsoft Windows Portable Executable"
            info = [self["pe_header/cpu"].display]
            if "pe_opt_header" in self:
                hdr = self["pe_opt_header"]
                info.append(hdr["subsystem"].display)
            if self["pe_header/is_stripped"].value:
                info.append(u"stripped")
            return u"%s: %s" % (text, ", ".join(info))
        elif self.isNE():
            return u"New-style Executable (NE) for Microsoft MS Windows 3.x"
        else:
            return u"MS-DOS executable"

    def createContentSize(self):
        """Return the content size in bits, or None when it cannot be guessed."""
        if self.isPE():
            # Content ends at the end of the farthest section on disk.
            size = 0
            for index in xrange(self["pe_header/nb_section"].value):
                section = self["section_hdr[%u]" % index]
                section_size = section["phys_size"].value
                if not section_size:
                    continue
                section_size = (section_size + section["phys_off"].value) * 8
                if size:
                    size = max(size, section_size)
                else:
                    size = section_size
            if size:
                return size
            else:
                return None
        elif self.isNE():
            # TODO: Guess NE size
            return None
        else:
            # MS-DOS: size counted in 512-byte pages, last page partial.
            size = self["msdos/size_mod_512"].value + (self["msdos/size_div_512"].value-1) * 512
            if size < 0:
                return None
        return size*8
+
diff --git a/lib/hachoir_parser/program/exe_ne.py b/lib/hachoir_parser/program/exe_ne.py
new file mode 100644
index 0000000000000000000000000000000000000000..cf62e885da2e9e69b30fffe3f4b0bb15680a5a4d
--- /dev/null
+++ b/lib/hachoir_parser/program/exe_ne.py
@@ -0,0 +1,60 @@
+from hachoir_core.field import (FieldSet,
+    Bit, UInt8, UInt16, UInt32, Bytes,
+    PaddingBits, PaddingBytes, NullBits, NullBytes)
+from hachoir_core.text_handler import textHandler, hexadecimal, filesizeHandler
+
class NE_Header(FieldSet):
    """64-byte New Executable (NE) header used by 16-bit Windows binaries."""
    static_size = 64*8

    def createFields(self):
        yield Bytes(self, "signature", 2, "New executable signature (NE)")
        yield UInt8(self, "link_ver", "Linker version number")
        yield UInt8(self, "link_rev", "Linker revision number")
        yield UInt16(self, "entry_table_ofst", "Offset to the entry table")
        yield UInt16(self, "entry_table_size", "Length (in bytes) of the entry table")
        yield PaddingBytes(self, "reserved[]", 4)

        # Program flags (one byte of bit fields)
        yield Bit(self, "is_dll", "Is a dynamic-link library (DLL)?")
        yield Bit(self, "is_win_app", "Is a Windows application?")
        yield PaddingBits(self, "reserved[]", 9)
        yield Bit(self, "first_seg_code", "First segment contains code that loads the application?")
        yield NullBits(self, "reserved[]", 1)
        yield Bit(self, "link_error", "Load even if linker detects errors?")
        yield NullBits(self, "reserved[]", 1)
        yield Bit(self, "is_lib", "Is a library module?")

        yield UInt16(self, "auto_data_seg", "Automatic data segment number")
        yield filesizeHandler(UInt16(self, "local_heap_size", "Initial size (in bytes) of the local heap"))
        yield filesizeHandler(UInt16(self, "stack_size", "Initial size (in bytes) of the stack"))
        yield textHandler(UInt32(self, "cs_ip", "Value of CS:IP"), hexadecimal)
        yield textHandler(UInt32(self, "ss_sp", "Value of SS:SP"), hexadecimal)

        # Table sizes and offsets (offsets relative to the NE header)
        yield UInt16(self, "nb_entry_seg_tab", "Number of entries in the segment table")
        yield UInt16(self, "nb_entry_modref_tab", "Number of entries in the module-reference table")
        yield filesizeHandler(UInt16(self, "size_nonres_name_tab", "Number of bytes in the nonresident-name table"))
        yield UInt16(self, "seg_tab_ofs", "Segment table offset")
        yield UInt16(self, "rsrc_ofs", "Resource offset")

        yield UInt16(self, "res_name_tab_ofs", "Resident-name table offset")
        yield UInt16(self, "mod_ref_tab_ofs", "Module-reference table offset")
        yield UInt16(self, "import_tab_ofs", "Imported-name table offset")

        yield UInt32(self, "non_res_name_tab_ofs", "Nonresident-name table offset")
        yield UInt16(self, "nb_mov_ent_pt", "Number of movable entry points")
        yield UInt16(self, "log2_sector_size", "Log2 of the segment sector size")
        yield UInt16(self, "nb_rsrc_seg", "Number of resource segments")

        # Operating system / loader flags
        yield Bit(self, "unknown_os_format", "Operating system format is unknown")
        yield PaddingBits(self, "reserved[]", 1)
        yield Bit(self, "os_windows", "Operating system is Microsoft Windows")
        yield NullBits(self, "reserved[]", 6)
        yield Bit(self, "is_win20_prot", "Is Windows 2.x application running in version 3.x protected mode")
        yield Bit(self, "is_win20_font", "Is Windows 2.x application supporting proportional fonts")
        yield Bit(self, "fast_load", "Contains a fast-load area?")
        yield NullBits(self, "reserved[]", 4)

        yield UInt16(self, "fastload_ofs", "Fast-load area offset (in sector)")
        yield UInt16(self, "fastload_size", "Fast-load area length (in sector)")

        yield NullBytes(self, "reserved[]", 2)
        yield textHandler(UInt16(self, "win_version", "Expected Windows version number"), hexadecimal)
+
diff --git a/lib/hachoir_parser/program/exe_pe.py b/lib/hachoir_parser/program/exe_pe.py
new file mode 100644
index 0000000000000000000000000000000000000000..b3b241a58f84a2ce1ed8a1a408ae84895b4423c7
--- /dev/null
+++ b/lib/hachoir_parser/program/exe_pe.py
@@ -0,0 +1,221 @@
+from hachoir_core.field import (FieldSet, ParserError,
+    Bit, UInt8, UInt16, UInt32, TimestampUnix32,
+    Bytes, String, Enum,
+    PaddingBytes, PaddingBits, NullBytes, NullBits)
+from hachoir_core.text_handler import textHandler, hexadecimal, filesizeHandler
+from hachoir_core.error import HACHOIR_ERRORS
+
+class SectionHeader(FieldSet):
+    static_size = 40 * 8
+    def createFields(self):
+        yield String(self, "name", 8, charset="ASCII", strip="\0 ")
+        yield filesizeHandler(UInt32(self, "mem_size", "Size in memory"))
+        yield textHandler(UInt32(self, "rva", "RVA (location) in memory"), hexadecimal)
+        yield filesizeHandler(UInt32(self, "phys_size", "Physical size (on disk)"))
+        yield filesizeHandler(UInt32(self, "phys_off", "Physical location (on disk)"))
+        yield PaddingBytes(self, "reserved", 12)
+
+        # 0x0000000#
+        yield NullBits(self, "reserved[]", 4)
+        # 0x000000#0
+        yield NullBits(self, "reserved[]", 1)
+        yield Bit(self, "has_code", "Contains code")
+        yield Bit(self, "has_init_data", "Contains initialized data")
+        yield Bit(self, "has_uninit_data", "Contains uninitialized data")
+        # 0x00000#00
+        yield NullBits(self, "reserved[]", 1)
+        yield Bit(self, "has_comment", "Contains comments?")
+        yield NullBits(self, "reserved[]", 1)
+        yield Bit(self, "remove", "Contents will not become part of image")
+        # 0x0000#000
+        yield Bit(self, "has_comdata", "Contains comdat?")
+        yield NullBits(self, "reserved[]", 1)
+        yield Bit(self, "no_defer_spec_exc", "Reset speculative exceptions handling bits in the TLB entries")
+        yield Bit(self, "gp_rel", "Content can be accessed relative to GP")
+        # 0x000#0000
+        yield NullBits(self, "reserved[]", 4)
+        # 0x00#00000
+        yield NullBits(self, "reserved[]", 4)
+        # 0x0#000000
+        yield Bit(self, "ext_reloc", "Contains extended relocations?")
+        yield Bit(self, "discarded", "Can be discarded?")
+        yield Bit(self, "is_not_cached", "Is not cachable?")
+        yield Bit(self, "is_not_paged", "Is not pageable?")
+        # 0x#0000000
+        yield Bit(self, "is_shareable", "Is shareable?")
+        yield Bit(self, "is_executable", "Is executable?")
+        yield Bit(self, "is_readable", "Is readable?")
+        yield Bit(self, "is_writable", "Is writable?")
+
+    def rva2file(self, rva):
+        return self["phys_off"].value + (rva - self["rva"].value)
+
+    def createDescription(self):
+        rva = self["rva"].value
+        size = self["mem_size"].value
+        info = [
+            "rva=0x%08x..0x%08x" % (rva, rva+size),
+            "size=%s" % self["mem_size"].display,
+        ]
+        if self["is_executable"].value:
+            info.append("exec")
+        if self["is_readable"].value:
+            info.append("read")
+        if self["is_writable"].value:
+            info.append("write")
+        return 'Section "%s": %s' % (self["name"].value, ", ".join(info))
+
+    def createSectionName(self):
+        try:
+            name = str(self["name"].value.strip("."))
+            if name:
+                return "section_%s" % name
+        except HACHOIR_ERRORS, err:
+            self.warning(unicode(err))
+            return "section[]"
+
class DataDirectory(FieldSet):
    """One entry of the PE optional header directory table: RVA and size."""
    def createFields(self):
        yield textHandler(UInt32(self, "rva", "Virtual address"), hexadecimal)
        yield filesizeHandler(UInt32(self, "size"))

    def createDescription(self):
        # An all-zero entry means the directory is absent.
        if not self["size"].value:
            return "(empty directory)"
        return "Directory at %s (%s)" % (
            self["rva"].display, self["size"].display)
+
class PE_Header(FieldSet):
    """PE signature plus COFF file header (IMAGE_FILE_HEADER), 24 bytes."""
    static_size = 24*8
    # IMAGE_FILE_MACHINE_* values. The original dict listed several keys
    # twice (0x0162, 0x0166, 0x0168, 0x0184, 0x01f0); duplicates were
    # removed keeping the values that actually took effect (last wins in
    # a dict literal), so the mapping is unchanged.
    cpu_name = {
        0x014C: u"Intel 80386",
        0x014D: u"Intel 80486",
        0x014E: u"Intel Pentium",
        0x0160: u"R3000 (MIPS), big endian",
        0x0162: u"R3000 (MIPS), little endian",
        0x0166: u"R4000 (MIPS), little endian",
        0x0168: u"R10000 (MIPS), little endian",
        0x0184: u"DEC Alpha AXP",
        0x01a2: u"Hitachi SH3",
        0x01a6: u"Hitachi SH4",
        0x01c0: u"ARM",
        0x01F0: u"IBM Power PC, little endian",
        0x0200: u"Intel IA64",
        0x0266: u"MIPS",
        0x0268: u"Motorola 68000",
        0x0284: u"Alpha AXP 64 bits",
        0x0366: u"MIPS with FPU",
        0x0466: u"MIPS16 with FPU",
    }

    def createFields(self):
        yield Bytes(self, "header", 4, r"PE header signature (PE\0\0)")
        if self["header"].value != "PE\0\0":
            raise ParserError("Invalid PE header signature")
        yield Enum(UInt16(self, "cpu", "CPU type"), self.cpu_name)
        yield UInt16(self, "nb_section", "Number of sections")
        yield TimestampUnix32(self, "creation_date", "Creation date")
        yield UInt32(self, "ptr_to_sym", "Pointer to symbol table")
        yield UInt32(self, "nb_symbols", "Number of symbols")
        yield UInt16(self, "opt_hdr_size", "Optional header size")

        # Characteristics bit field
        yield Bit(self, "reloc_stripped", "If true, don't contain base relocations.")
        yield Bit(self, "exec_image", "Executable image?")
        yield Bit(self, "line_nb_stripped", "COFF line numbers stripped?")
        yield Bit(self, "local_sym_stripped", "COFF symbol table entries stripped?")
        yield Bit(self, "aggr_ws", "Aggressively trim working set")
        yield Bit(self, "large_addr", "Application can handle addresses greater than 2 GB")
        yield NullBits(self, "reserved", 1)
        yield Bit(self, "reverse_lo", "Little endian: LSB precedes MSB in memory")
        yield Bit(self, "32bit", "Machine based on 32-bit-word architecture")
        yield Bit(self, "is_stripped", "Debugging information removed?")
        yield Bit(self, "swap", "If image is on removable media, copy and run from swap file")
        yield PaddingBits(self, "reserved2", 1)
        yield Bit(self, "is_system", "It's a system file")
        yield Bit(self, "is_dll", "It's a dynamic-link library (DLL)")
        yield Bit(self, "up", "File should be run only on a UP machine")
        yield Bit(self, "reverse_hi", "Big endian: MSB precedes LSB in memory")
+
class PE_OptHeader(FieldSet):
    """
    PE optional header (IMAGE_OPTIONAL_HEADER32, signature 0x010b),
    followed by the variable-length data directory table.
    """
    SUBSYSTEM_NAME = {
         1: u"Native",
         2: u"Windows GUI",
         3: u"Windows CUI",
         5: u"OS/2 CUI",
         7: u"POSIX CUI",
         8: u"Native Windows",
         9: u"Windows CE GUI",
        10: u"EFI application",
        11: u"EFI boot service driver",
        12: u"EFI runtime driver",
        13: u"EFI ROM",
        14: u"XBOX",
        16: u"Windows boot application",
    }
    # Well-known data directory slots, by index.
    DIRECTORY_NAME = {
         0: "export",
         1: "import",
         2: "resource",
         3: "exception",
         4: "certificate",
         5: "relocation",
         6: "debug",
         7: "description",
         8: "global_ptr",
         9: "tls",   # Thread local storage
        10: "load_config",
        11: "bound_import",
        12: "import_address",
    }

    def createFields(self):
        yield UInt16(self, "signature", "PE optional header signature (0x010b)")
        # TODO: Support PE32+ (signature=0x020b)
        if self["signature"].value != 0x010b:
            raise ParserError("Invalid PE optional header signature")
        yield UInt8(self, "maj_lnk_ver", "Major linker version")
        yield UInt8(self, "min_lnk_ver", "Minor linker version")
        yield filesizeHandler(UInt32(self, "size_code", "Size of code"))
        yield filesizeHandler(UInt32(self, "size_init_data", "Size of initialized data"))
        yield filesizeHandler(UInt32(self, "size_uninit_data", "Size of uninitialized data"))
        yield textHandler(UInt32(self, "entry_point", "Address (RVA) of the code entry point"), hexadecimal)
        yield textHandler(UInt32(self, "base_code", "Base (RVA) of code"), hexadecimal)
        yield textHandler(UInt32(self, "base_data", "Base (RVA) of data"), hexadecimal)
        yield textHandler(UInt32(self, "image_base", "Image base (RVA)"), hexadecimal)
        yield filesizeHandler(UInt32(self, "sect_align", "Section alignment"))
        yield filesizeHandler(UInt32(self, "file_align", "File alignment"))
        yield UInt16(self, "maj_os_ver", "Major OS version")
        yield UInt16(self, "min_os_ver", "Minor OS version")
        yield UInt16(self, "maj_img_ver", "Major image version")
        yield UInt16(self, "min_img_ver", "Minor image version")
        yield UInt16(self, "maj_subsys_ver", "Major subsystem version")
        yield UInt16(self, "min_subsys_ver", "Minor subsystem version")
        yield NullBytes(self, "reserved", 4)
        yield filesizeHandler(UInt32(self, "size_img", "Size of image"))
        yield filesizeHandler(UInt32(self, "size_hdr", "Size of headers"))
        yield textHandler(UInt32(self, "checksum"), hexadecimal)
        yield Enum(UInt16(self, "subsystem"), self.SUBSYSTEM_NAME)
        yield UInt16(self, "dll_flags")
        yield filesizeHandler(UInt32(self, "size_stack_reserve"))
        yield filesizeHandler(UInt32(self, "size_stack_commit"))
        yield filesizeHandler(UInt32(self, "size_heap_reserve"))
        yield filesizeHandler(UInt32(self, "size_heap_commit"))
        yield UInt32(self, "loader_flags")
        yield UInt32(self, "nb_directory", "Number of RVA and sizes")
        # Data directory table: name the well-known slots, number the rest.
        for index in xrange(self["nb_directory"].value):
            try:
                name = self.DIRECTORY_NAME[index]
            except KeyError:
                name = "data_dir[%u]" % index
            yield DataDirectory(self, name)

    def createDescription(self):
        return "PE optional header: %s, entry point %s" % (
            self["subsystem"].display,
            self["entry_point"].display)
+
diff --git a/lib/hachoir_parser/program/exe_res.py b/lib/hachoir_parser/program/exe_res.py
new file mode 100644
index 0000000000000000000000000000000000000000..850fcf018515995ca48a54f42cdf48dce2d00375
--- /dev/null
+++ b/lib/hachoir_parser/program/exe_res.py
@@ -0,0 +1,445 @@
+"""
+Parser for resource of Microsoft Windows Portable Executable (PE).
+
+Documentation:
+- Wine project
+  VS_FIXEDFILEINFO structure, file include/winver.h
+
+Author: Victor Stinner
+Creation date: 2007-01-19
+"""
+
+from hachoir_core.field import (FieldSet, ParserError, Enum,
+    Bit, Bits, SeekableFieldSet,
+    UInt16, UInt32, TimestampUnix32,
+    RawBytes, PaddingBytes, NullBytes, NullBits,
+    CString, String)
+from hachoir_core.text_handler import textHandler, filesizeHandler, hexadecimal
+from hachoir_core.tools import createDict, paddingSize, alignValue, makePrintable
+from hachoir_core.error import HACHOIR_ERRORS
+from hachoir_parser.common.win32 import BitmapInfoHeader
+
+MAX_DEPTH = 5
+MAX_INDEX_PER_HEADER = 300
+MAX_NAME_PER_HEADER = MAX_INDEX_PER_HEADER
+
class Version(FieldSet):
    """Fixed 32-bit version number: minor 16-bit word followed by major word."""
    static_size = 32

    def createFields(self):
        for field_name, title in (("minor", "Minor version number"),
                                  ("major", "Major version number")):
            yield textHandler(UInt16(self, field_name, title), hexadecimal)

    def createValue(self):
        # Combine both words into a single float, e.g. 4.0002.
        minor = self["minor"].value
        major = self["major"].value
        return major + float(minor) / 10000
+
# Values of the "file_os_major" field of VS_FIXEDFILEINFO
# (see VersionInfoBinary.createFields).
MAJOR_OS_NAME = {
    1: "DOS",
    2: "OS/2 16-bit",
    3: "OS/2 32-bit",
    4: "Windows NT",
}

# Values of the "file_os_minor" field of VS_FIXEDFILEINFO.
MINOR_OS_BASE = 0
MINOR_OS_NAME = {
    0: "Base",
    1: "Windows 16-bit",
    2: "Presentation Manager 16-bit",
    3: "Presentation Manager 32-bit",
    4: "Windows 32-bit",
}

# Values of the "file_type" field of VS_FIXEDFILEINFO.
FILETYPE_DRIVER = 3
FILETYPE_FONT = 4
FILETYPE_NAME = {
    1: "Application",
    2: "DLL",
    3: "Driver",
    4: "Font",
    5: "VXD",
    7: "Static library",
}

# Sub-type names used when the file is a driver.
DRIVER_SUBTYPE_NAME = {
     1: "Printer",
     2: "Keyboard",
     3: "Language",
     4: "Display",
     5: "Mouse",
     6: "Network",
     7: "System",
     8: "Installable",
     9: "Sound",
    10: "Communications",
}

# Sub-type names used when the file is a font.
FONT_SUBTYPE_NAME = {
    1: "Raster",
    2: "Vector",
    3: "TrueType",
}
+
class VersionInfoBinary(FieldSet):
    """
    VS_FIXEDFILEINFO structure (see include/winver.h in the Wine project):
    fixed binary part of a version resource.  Field order is significant;
    parsing aborts if the leading magic is wrong.
    """
    def createFields(self):
        yield textHandler(UInt32(self, "magic", "File information magic (0xFEEF04BD)"), hexadecimal)
        if self["magic"].value != 0xFEEF04BD:
            raise ParserError("EXE resource: invalid file info magic")
        yield Version(self, "struct_ver", "Structure version (1.0)")
        yield Version(self, "file_ver_ms", "File version MS")
        yield Version(self, "file_ver_ls", "File version LS")
        yield Version(self, "product_ver_ms", "Product version MS")
        yield Version(self, "product_ver_ls", "Product version LS")
        yield textHandler(UInt32(self, "file_flags_mask"), hexadecimal)

        # Individual bits of the "file flags" dword.
        yield Bit(self, "debug")
        yield Bit(self, "prerelease")
        yield Bit(self, "patched")
        yield Bit(self, "private_build")
        yield Bit(self, "info_inferred")
        yield Bit(self, "special_build")
        yield NullBits(self, "reserved", 26)

        yield Enum(textHandler(UInt16(self, "file_os_major"), hexadecimal), MAJOR_OS_NAME)
        yield Enum(textHandler(UInt16(self, "file_os_minor"), hexadecimal), MINOR_OS_NAME)
        yield Enum(textHandler(UInt32(self, "file_type"), hexadecimal), FILETYPE_NAME)
        # NOTE(review): the subtype enum is chosen from the subtype field's own
        # value; presumably it should depend on self["file_type"].value instead
        # -- confirm against the VS_FIXEDFILEINFO documentation before changing.
        field = textHandler(UInt32(self, "file_subfile"), hexadecimal)
        if field.value == FILETYPE_DRIVER:
            field = Enum(field, DRIVER_SUBTYPE_NAME)
        elif field.value == FILETYPE_FONT:
            field = Enum(field, FONT_SUBTYPE_NAME)
        yield field
        yield TimestampUnix32(self, "date_ms")
        yield TimestampUnix32(self, "date_ls")
+
class VersionInfoNode(FieldSet):
    """
    One node of a version-information tree (VS_VERSION_INFO and children):
    size, data size, type, UTF-16 name, padding, optional value, then child
    nodes up to the declared total size.
    """
    TYPE_STRING = 1
    TYPE_NAME = {
        0: "binary",
        1: "string",
    }

    def __init__(self, parent, name, is_32bit=True):
        FieldSet.__init__(self, parent, name)
        # Reading self["size"] triggers lazy parsing of the first field;
        # the node's total size is rounded up to a 32-bit boundary.
        self._size = alignValue(self["size"].value, 4) * 8
        self.is_32bit = is_32bit

    def createFields(self):
        yield UInt16(self, "size", "Node size (in bytes)")
        yield UInt16(self, "data_size")
        yield Enum(UInt16(self, "type"), self.TYPE_NAME)
        yield CString(self, "name", charset="UTF-16-LE")

        # Align the value on a 32-bit boundary.
        size = paddingSize(self.current_size//8, 4)
        if size:
            yield NullBytes(self, "padding[]", size)
        size = self["data_size"].value
        if size:
            if self["type"].value == self.TYPE_STRING:
                # In the 32-bit layout, data_size apparently counts 16-bit
                # characters rather than bytes -- TODO confirm.
                if self.is_32bit:
                    size *= 2
                yield String(self, "value", size, charset="UTF-16-LE", truncate="\0")
            elif self["name"].value == "VS_VERSION_INFO":
                yield VersionInfoBinary(self, "value", size=size*8)
                # A zero flags mask is used here as the marker of the 16-bit
                # layout for the rest of the tree -- NOTE(review): confirm.
                if self["value/file_flags_mask"].value == 0:
                    self.is_32bit = False
            else:
                yield RawBytes(self, "value", size)
        # Keep reading child nodes while at least 12 bytes remain.
        while 12 <= (self.size - self.current_size) // 8:
            yield VersionInfoNode(self, "node[]", self.is_32bit)
        size = (self.size - self.current_size) // 8
        if size:
            yield NullBytes(self, "padding[]", size)


    def createDescription(self):
        text = "Version info node: %s" % self["name"].value
        if self["type"].value == self.TYPE_STRING and "value" in self:
            text += "=%s" % self["value"].value
        return text
+
def parseVersionInfo(parent):
    """Parse a version-information resource: one tree of VersionInfoNode."""
    yield VersionInfoNode(parent, "node[]")
+
def parseIcon(parent):
    """Parse an icon resource: a bitmap header followed by raw image bytes."""
    yield BitmapInfoHeader(parent, "bmp_header")
    remaining = (parent.size - parent.current_size) // 8
    if remaining:
        yield RawBytes(parent, "raw", remaining)
+
class WindowsString(FieldSet):
    """UTF-16 string prefixed by its 16-bit character count."""
    def createFields(self):
        yield UInt16(self, "length", "Number of 16-bit characters")
        nbytes = self["length"].value * 2
        if nbytes:
            yield String(self, "text", nbytes, charset="UTF-16-LE")

    def createValue(self):
        # An empty string has no "text" field at all.
        if "text" not in self:
            return u""
        return self["text"].value

    def createDisplay(self):
        return makePrintable(self.value, "UTF-8", to_unicode=True, quote='"')
+
def parseStringTable(parent):
    """Parse a string table resource: consecutive length-prefixed strings."""
    while not parent.eof:
        yield WindowsString(parent, "string[]")
+
# Resource type id -> (field name, description, content parser or None).
# The parser, when set, is a generator taking the content field set.
RESOURCE_TYPE = {
    1: ("cursor[]", "Cursor", None),
    2: ("bitmap[]", "Bitmap", None),
    3: ("icon[]", "Icon", parseIcon),
    4: ("menu[]", "Menu", None),
    5: ("dialog[]", "Dialog", None),
    6: ("string_table[]", "String table", parseStringTable),
    7: ("font_dir[]", "Font directory", None),
    8: ("font[]", "Font", None),
    9: ("accelerators[]", "Accelerators", None),
    10: ("raw_res[]", "Unformatted resource data", None),
    11: ("message_table[]", "Message table", None),
    12: ("group_cursor[]", "Group cursor", None),
    14: ("group_icon[]", "Group icon", None),
    16: ("version_info", "Version information", parseVersionInfo),
}
+
class Entry(FieldSet):
    """Resource data entry: location (RVA), size and codepage of one leaf."""
    static_size = 16*8

    def __init__(self, parent, name, inode=None):
        FieldSet.__init__(self, parent, name)
        # Directory index entry (IndexOffset) that pointed at this entry.
        self.inode = inode

    def createFields(self):
        yield textHandler(UInt32(self, "rva"), hexadecimal)
        yield filesizeHandler(UInt32(self, "size"))
        yield UInt32(self, "codepage")
        yield NullBytes(self, "reserved", 4)

    def createDescription(self):
        values = (self.inode["offset"].value,
                  self["rva"].display,
                  self["size"].display)
        return "Entry #%u: offset=%s size=%s" % values
+
class NameOffset(FieldSet):
    # Directory entry referenced by name: a "name" dword, then a 31-bit
    # offset plus a flag telling whether the offset targets a sub-directory.
    def createFields(self):
        yield UInt32(self, "name")
        yield Bits(self, "offset", 31)
        yield Bit(self, "is_name")
+
class IndexOffset(FieldSet):
    """Directory entry referenced by integer ID (type at the first level)."""
    TYPE_DESC = createDict(RESOURCE_TYPE, 1)

    def __init__(self, parent, name, res_type=None):
        FieldSet.__init__(self, parent, name)
        # Resource type propagated from the first directory level (may be None).
        self.res_type = res_type

    def createFields(self):
        yield Enum(UInt32(self, "type"), self.TYPE_DESC)
        yield Bits(self, "offset", 31)
        yield Bit(self, "is_subdir")

    def createDescription(self):
        if self["is_subdir"].value:
            template = "Sub-directory: %s at %s"
        else:
            template = "Index: ID %s at %s"
        return template % (self["type"].display, self["offset"].value)
+
class ResourceContent(FieldSet):
    """Content of one resource leaf, parsed with a type-specific parser
    when one is registered in RESOURCE_TYPE."""
    def __init__(self, parent, name, entry, size=None):
        FieldSet.__init__(self, parent, name, size=entry["size"].value*8)
        self.entry = entry
        res_type = self.getResType()
        if res_type in RESOURCE_TYPE:
            # Rename the field after its resource type and pick its parser.
            self._name, _desc, self._parser = RESOURCE_TYPE[res_type]
        else:
            self._parser = None

    def getResID(self):
        return self.entry.inode["offset"].value

    def getResType(self):
        return self.entry.inode.res_type

    def createFields(self):
        if not self._parser:
            yield RawBytes(self, "content", self.size//8)
            return
        for field in self._parser(self):
            yield field

    def createDescription(self):
        return "Resource #%u content: type=%s" % (
            self.getResID(), self.getResType())
+
class Header(FieldSet):
    """Fixed 16-byte resource directory header."""
    static_size = 16*8

    def createFields(self):
        yield NullBytes(self, "options", 4)
        yield TimestampUnix32(self, "creation_date")
        yield UInt16(self, "maj_ver", "Major version")
        yield UInt16(self, "min_ver", "Minor version")
        yield UInt16(self, "nb_name", "Number of named entries")
        yield UInt16(self, "nb_index", "Number of indexed entries")

    def createDescription(self):
        info = []
        for key, template in (("nb_name", "%u name"), ("nb_index", "%u index")):
            if self[key].value:
                info.append(template % self[key].value)
        if self["creation_date"].value:
            info.append(self["creation_date"].display)
        if not info:
            return "Resource header"
        return "Resource header: %s" % ", ".join(info)
+
class Name(FieldSet):
    """Length-prefixed UTF-16 resource name, truncated to 255."""
    def createFields(self):
        yield UInt16(self, "length")
        # NOTE(review): "length" is presumably a count of 16-bit characters
        # (compare WindowsString, which multiplies by two); the byte size
        # passed to String looks like it is missing a "* 2" -- confirm
        # against real PE files before changing.
        size = min(self["length"].value, 255)
        if size:
            yield String(self, "name", size, charset="UTF-16LE")
+
class Directory(FieldSet):
    """Resource directory: a Header followed by name and index entries."""
    def __init__(self, parent, name, res_type=None):
        FieldSet.__init__(self, parent, name)
        # Reading the header sub-fields here triggers lazy parsing of the
        # header; each entry is 8 bytes (64 bits) long.
        nb_entries = self["header/nb_name"].value + self["header/nb_index"].value
        self._size = Header.static_size + nb_entries * 64
        self.res_type = res_type

    def createFields(self):
        yield Header(self, "header")

        # Sanity limits so a corrupted file cannot make us loop for long.
        if MAX_NAME_PER_HEADER < self["header/nb_name"].value:
            raise ParserError("EXE resource: invalid number of name (%s)"
                % self["header/nb_name"].value)
        if MAX_INDEX_PER_HEADER < self["header/nb_index"].value:
            raise ParserError("EXE resource: invalid number of index (%s)"
                % self["header/nb_index"].value)

        hdr = self["header"]
        for index in xrange(hdr["nb_name"].value):
            yield NameOffset(self, "name[]")
        for index in xrange(hdr["nb_index"].value):
            yield IndexOffset(self, "index[]", self.res_type)

    def createDescription(self):
        return self["header"].description
+
class PE_Resource(SeekableFieldSet):
    """
    Whole ".rsrc" section of a PE file: a tree of directories (breadth-first,
    bounded by MAX_DEPTH), then the data entries, then the resource contents.
    """
    def __init__(self, parent, name, section, size):
        SeekableFieldSet.__init__(self, parent, name, size=size)
        # Section object used to translate RVAs into file offsets.
        self.section = section

    def parseSub(self, directory, name, depth):
        """Yield one Directory for each sub-directory entry of *directory*."""
        indexes = []
        for index in directory.array("index"):
            if index["is_subdir"].value:
                indexes.append(index)

        #indexes.sort(key=lambda index: index["offset"].value)
        for index in indexes:
            self.seekByte(index["offset"].value)
            # The resource type is only meaningful at the first level; deeper
            # directories inherit it from their parent.
            if depth == 1:
                res_type = index["type"].value
            else:
                res_type = directory.res_type
            yield Directory(self, name, res_type)

    def createFields(self):
        # Parse directories
        depth = 0
        subdir = Directory(self, "root")
        yield subdir
        subdirs = [subdir]
        alldirs = [subdir]
        while subdirs:
            depth += 1
            if MAX_DEPTH < depth:
                self.error("EXE resource: depth too high (%s), stop parsing directories" % depth)
                break
            newsubdirs = []
            for index, subdir in enumerate(subdirs):
                name = "directory[%u][%u][]" % (depth, index)
                try:
                    for field in self.parseSub(subdir, name, depth):
                        if field.__class__ == Directory:
                            newsubdirs.append(field)
                        yield field
                except HACHOIR_ERRORS, err:
                    self.error("Unable to create directory %s: %s" % (name, err))
            subdirs = newsubdirs
            alldirs.extend(subdirs)

        # Create resource list
        resources = []
        for directory in alldirs:
            for index in directory.array("index"):
                if not index["is_subdir"].value:
                    resources.append(index)

        # Parse entries
        entries = []
        for resource in resources:
            offset = resource["offset"].value
            if offset is None:
                continue
            self.seekByte(offset)
            entry = Entry(self, "entry[]", inode=resource)
            yield entry
            entries.append(entry)
        # Read contents in file order.
        entries.sort(key=lambda entry: entry["rva"].value)

        # Parse resource content
        for entry in entries:
            try:
                offset = self.section.rva2file(entry["rva"].value)
                padding = self.seekByte(offset, relative=False)
                if padding:
                    yield padding
                yield ResourceContent(self, "content[]", entry)
            except HACHOIR_ERRORS, err:
                # Best effort: a broken entry doesn't abort the whole section.
                self.warning("Error when parsing entry %s: %s" % (entry.path, err))

        size = (self.size - self.current_size) // 8
        if size:
            yield PaddingBytes(self, "padding_end", size)
+
class NE_VersionInfoNode(FieldSet):
    """
    Version-information node of a 16-bit NE executable: like VersionInfoNode,
    but without a "type" field and with ISO-8859-1 strings instead of UTF-16.
    """
    TYPE_STRING = 1
    TYPE_NAME = {
        0: "binary",
        1: "string",
    }

    def __init__(self, parent, name):
        FieldSet.__init__(self, parent, name)
        # Reading self["size"] triggers lazy parsing of the first field;
        # the node's total size is rounded up to a 32-bit boundary.
        self._size = alignValue(self["size"].value, 4) * 8

    def createFields(self):
        yield UInt16(self, "size", "Node size (in bytes)")
        yield UInt16(self, "data_size")
        yield CString(self, "name", charset="ISO-8859-1")

        # Align the value on a 32-bit boundary.
        size = paddingSize(self.current_size//8, 4)
        if size:
            yield NullBytes(self, "padding[]", size)
        size = self["data_size"].value
        if size:
            if self["name"].value == "VS_VERSION_INFO":
                yield VersionInfoBinary(self, "value", size=size*8)
            else:
                yield String(self, "value", size, charset="ISO-8859-1")
        # Keep reading child nodes while at least 12 bytes remain.
        while 12 <= (self.size - self.current_size) // 8:
            yield NE_VersionInfoNode(self, "node[]")
        size = (self.size - self.current_size) // 8
        if size:
            yield NullBytes(self, "padding[]", size)


    def createDescription(self):
        text = "Version info node: %s" % self["name"].value
        # There is no "type" field in the NE layout, hence the disabled code.
#        if self["type"].value == self.TYPE_STRING and "value" in self:
#            text += "=%s" % self["value"].value
        return text
+
diff --git a/lib/hachoir_parser/program/java.py b/lib/hachoir_parser/program/java.py
new file mode 100644
index 0000000000000000000000000000000000000000..7329cbe07aa08bc54a7368a38f84116cdeaa1f19
--- /dev/null
+++ b/lib/hachoir_parser/program/java.py
@@ -0,0 +1,1097 @@
+"""
+Compiled Java classes parser.
+
+Author: Thomas de Grenier de Latour (TGL) <degrenier@easyconnect.fr>
+Creation: 2006/11/01
+Last-update: 2006/11/06
+
+Introduction:
+ * This parser is for compiled Java classes, aka .class files.  What is nice
+   with this format is that it is well documented in the official Java VM specs.
+ * Some fields, and most field sets, have dynamic sizes, and there is no offset
+   to directly jump from an header to a given section, or anything like that.
+   It means that accessing a field at the end of the file requires that you've
+   already parsed almost the whole file.  That's not very efficient, but it's
+   okay given the usual size of .class files (usually a few KB).
+ * Most fields are just indexes of some "constant pool" entries, which hold
+   most constant data of the class.  And constant pool entries reference other
+   constant pool entries, etc.  Hence, a raw display of these fields only shows
+   integers and is not really understandable.  Because of that, this parser
+   comes with two important custom field classes:
+    - CPInfo are constant pool entries.  They have a type ("Utf8", "Methodref",
+      etc.), and some contents fields depending on this type.  They also have a
+      "__str__()" method, which returns a synthetic view of their contents.
+    - CPIndex are constant pool indexes (UInt16).  It is possible to specify
+      what type of CPInfo they are allowed to points to.  They also have a
+      custom display method, usually printing something like "->  foo", where
+      foo is the str() of their target CPInfo.
+
+References:
+ * The Java Virtual Machine Specification, 2nd edition, chapter 4, in HTML:
+   http://java.sun.com/docs/books/vmspec/2nd-edition/html/ClassFile.doc.html
+    => That's the spec i've been implementing so far. I think it is format
+       version 46.0 (JDK 1.2).
+ * The Java Virtual Machine Specification, 2nd edition, chapter 4, in PDF:
+   http://java.sun.com/docs/books/vmspec/2nd-edition/ClassFileFormat.pdf
+    => don't trust the URL, this PDF version is more recent than the HTML one.
+       It highlights some recent additions to the format (I don't know the
+       exact version though), which are not yet implemented in this parser.
+ * The Java Virtual Machine Specification, chapter 4:
+   http://java.sun.com/docs/books/vmspec/html/ClassFile.doc.html
+    => describes an older format, probably version 45.3 (JDK 1.1).
+
+TODO/FIXME:
+ * Google for some existing free .class files parsers, to get more infos on
+   the various formats differences, etc.
+ * Write/compile some good tests cases.
+ * Rework pretty-printing of CPIndex fields.  This str() thing sinks.
+ * Add support of formats other than 46.0 (45.3 seems to already be ok, but
+   there are things to add for later formats).
+ * Make parsing robust: currently, the parser will die on asserts as soon as
+   something seems wrong.  It should rather be tolerant, print errors/warnings,
+   and try its best to continue.  Check how error-handling is done in other
+   parsers.
+ * Gettextize the whole thing.
+ * Check whether Float32/64 are really the same as Java floats/double. PEP-0754
+   says that handling of +/-infinity and NaN is very implementation-dependent.
+   Also check how this values are displayed.
+ * Make the parser edition-proof.  For instance, editing a constant-pool string
+   should update the length field of it's entry, etc.  Sounds like a huge work.
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (
+        ParserError, FieldSet, StaticFieldSet,
+        Enum, RawBytes, PascalString16, Float32, Float64,
+        Int8, UInt8, Int16, UInt16, Int32, UInt32, Int64,
+        Bit, NullBits )
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal
+from hachoir_core.tools import paddingSize
+
+###############################################################################
def parse_flags(flags, flags_dict, show_unknown_flags=True, separator=" "):
    """
    Format an integer bit-mask as a string, using flags_dict to map each
    known bit to its name.  Each unknown set bit is rendered as "???"
    unless show_unknown_flags is false.
    """
    names = []
    bit = 0x01
    while bit <= flags:
        if flags & bit:
            if bit in flags_dict:
                names.append(flags_dict[bit])
            elif show_unknown_flags:
                names.append("???")
        bit <<= 1
    return separator.join(names)
+
+
+###############################################################################
# Mapping of JVM primitive type descriptor codes to Java type names
# (JVM specification, chapter 4, field descriptors).
code_to_type_name = {
    'B': "byte",
    'C': "char",
    'D': "double",
    'F': "float",
    'I': "int",
    'J': "long",
    'S': "short",
    'Z': "boolean",
    'V': "void",
}
+
def eat_descriptor(descr):
    """
    Read the head of a field/method descriptor.

    Returns a pair (type, tail) where type is a human-readable string
    representation of the first type found and tail is the remainder of the
    descriptor string.  Raises ParserError on a malformed descriptor.
    """
    array_dim = 0
    while descr[0] == '[':
        array_dim += 1
        descr = descr[1:]
    if descr[0] == 'L':
        # Object type: "Lsome/class/Name;"
        end = descr.find(';')
        if end < 0:
            # BUG FIX: str.find() never raises, it returns -1; the previous
            # try/except silently produced a truncated type on a missing ';'.
            raise ParserError("Not a valid descriptor string: " + descr)
        type = descr[1:end]
        descr = descr[end:]
    else:
        # Primitive (or void) type: a single code letter.
        try:
            type = code_to_type_name[descr[0]]
        except KeyError:
            raise ParserError("Not a valid descriptor string: %s" % descr)
    return (type.replace("/", ".") + array_dim * "[]", descr[1:])
+
def parse_field_descriptor(descr, name=None):
    """
    Parse a field descriptor (a single type) and return it as a
    human-readable string, optionally followed by the field name.
    """
    assert descr
    type, tail = eat_descriptor(descr)
    assert not tail
    if not name:
        return type
    return "%s %s" % (type, name)
+
def parse_method_descriptor(descr, name=None):
    """
    Parse a method descriptor (parameter types and return type) and return
    a human-readable string, optionally with the method name inserted.
    """
    assert descr and (descr[0] == '(')
    descr = descr[1:]
    params_list = []
    # Consume parameter types until the closing parenthesis.
    while descr[0] != ')':
        param, descr = eat_descriptor(descr)
        params_list.append(param)
    type, tail = eat_descriptor(descr[1:])
    assert not tail
    params = ", ".join(params_list)
    if name:
        return "%s %s(%s)" % (type, name, params)
    return "%s (%s)" % (type, params)
+
def parse_any_descriptor(descr, name=None):
    """
    Parse either a field or a method descriptor and return it as a
    human-readable string representation.
    """
    assert descr
    # A method descriptor always starts with its parameter list.
    if descr.startswith('('):
        return parse_method_descriptor(descr, name)
    return parse_field_descriptor(descr, name)
+
+
+###############################################################################
class FieldArray(FieldSet):
    """
    Fixed-length array of fields which all have the same (possibly
    variable-sized) type, named "<name>[0]", "<name>[1]", ...
    """
    def __init__(self, parent, name, elements_class, length,
            **elements_extra_args):
        """Create a FieldArray of <length> fields of class <elements_class>;
        **elements_extra_args are forwarded to each element's constructor."""
        FieldSet.__init__(self, parent, name)
        self.array_elements_class = elements_class
        self.array_length = length
        self.array_elements_extra_args = elements_extra_args

    def createFields(self):
        make_element = self.array_elements_class
        extra_args = self.array_elements_extra_args
        for index in range(self.array_length):
            yield make_element(self, "%s[%d]" % (self.name, index), **extra_args)
+
class ConstantPool(FieldSet):
    """
    Like a FieldArray of CPInfo fields, except that numbering starts at 1
    and that Long/Double entries take up two consecutive indexes.
    """
    def __init__(self, parent, name, length):
        FieldSet.__init__(self, parent, name)
        self.constant_pool_length = length

    def createFields(self):
        index = 1
        while index < self.constant_pool_length:
            entry_name = "%s[%d]" % (self.name, index)
            yield CPInfo(self, entry_name)
            index += 1
            # A Long or Double constant also "uses up" the next index.
            if self[entry_name].constant_type in ("Long", "Double"):
                index += 1
+
+
+###############################################################################
class CPIndex(UInt16):
    """
    Holds the index of a constant pool entry, with optional type checking
    of the target and a pretty-printed display ("-> <target>").
    """
    def __init__(self, parent, name, description=None, target_types=None,
                target_text_handler=(lambda x: x), allow_zero=False):
        """
        Initialize a CPIndex.
        - target_types is the tuple of expected types for the target CPInfo
          (if None, then there will be no type check); a single string is
          also accepted and wrapped into a one-element tuple
        - target_text_handler is a string transformation function used for
          pretty printing the target str() result
        - allow_zero states whether a null index is allowed (sometimes, a
          constant pool index is optional)
        """
        UInt16.__init__(self, parent, name, description)
        if isinstance(target_types, str):
            self.target_types = (target_types,)
        else:
            self.target_types = target_types
        self.allow_zero = allow_zero
        self.target_text_handler = target_text_handler
        # Keep the raw integer available as the "original" display.
        self.getOriginalDisplay = lambda: self.value

    def createDisplay(self):
        cp_entry = self.get_cp_entry()
        if self.allow_zero and not cp_entry:
            return "ZERO"
        assert cp_entry
        return "-> " + self.target_text_handler(str(cp_entry))

    def get_cp_entry(self):
        """
        Returns the target CPInfo field.
        """
        # NOTE: these asserts die on malformed files; see the module TODO
        # about making the parser more tolerant.
        assert self.value < self["/constant_pool_count"].value
        if self.allow_zero and not self.value: return None
        cp_entry = self["/constant_pool/constant_pool[%d]" % self.value]
        assert isinstance(cp_entry, CPInfo)
        if self.target_types:
            assert cp_entry.constant_type in self.target_types
        return cp_entry
+
+
+###############################################################################
class JavaOpcode(FieldSet):
    """
    Base class of bytecode instructions.  OPSIZE is the fixed instruction
    size in bytes; 0 means the size is variable and left to createFields.
    """
    OPSIZE = 0

    def __init__(self, parent, name, op, desc):
        FieldSet.__init__(self, parent, name)
        if self.OPSIZE:
            self._size = self.OPSIZE * 8
        self.op = op
        self.desc = desc

    def createDisplay(self):
        return self.op

    def createDescription(self):
        return self.desc

    def createValue(self):
        return self.createDisplay()
+
class OpcodeNoArgs(JavaOpcode):
    # Single-byte instruction: just the opcode, no operand.
    OPSIZE = 1
    def createFields(self):
        yield UInt8(self, "opcode")
+
class OpcodeCPIndex(JavaOpcode):
    """Opcode followed by a 16-bit constant pool index."""
    OPSIZE = 3

    def createFields(self):
        yield UInt8(self, "opcode")
        yield CPIndex(self, "index")

    def createDisplay(self):
        return "%s(%i)"%(self.op, self["index"].value)
class OpcodeCPIndexShort(JavaOpcode):
    # Opcode followed by an 8-bit constant pool index (e.g. "ldc").
    # NOTE(review): structurally identical to OpcodeIndex below; the two
    # classes document different operand semantics.
    OPSIZE = 2
    def createFields(self):
        yield UInt8(self, "opcode")
        yield UInt8(self, "index")
    def createDisplay(self):
        return "%s(%i)"%(self.op, self["index"].value)
+
class OpcodeIndex(JavaOpcode):
    # Opcode followed by an 8-bit index (presumably a local-variable slot;
    # the exact meaning depends on the opcode assigned in OPCODE_TABLE).
    OPSIZE = 2
    def createFields(self):
        yield UInt8(self, "opcode")
        yield UInt8(self, "index")
    def createDisplay(self):
        return "%s(%i)"%(self.op, self["index"].value)
+
class OpcodeShortJump(JavaOpcode):
    # Opcode followed by a signed 16-bit branch offset.
    OPSIZE = 3
    def createFields(self):
        yield UInt8(self, "opcode")
        yield Int16(self, "offset")
    def createDisplay(self):
        return "%s(%s)"%(self.op, self["offset"].value)
+
class OpcodeLongJump(JavaOpcode):
    # Opcode followed by a signed 32-bit branch offset.
    OPSIZE = 5
    def createFields(self):
        yield UInt8(self, "opcode")
        yield Int32(self, "offset")
    def createDisplay(self):
        return "%s(%s)"%(self.op, self["offset"].value)
+
class OpcodeSpecial_bipush(JavaOpcode):
    # "bipush": opcode followed by a signed 8-bit immediate value.
    OPSIZE = 2
    def createFields(self):
        yield UInt8(self, "opcode")
        yield Int8(self, "value")
    def createDisplay(self):
        return "%s(%s)"%(self.op, self["value"].value)
+
class OpcodeSpecial_sipush(JavaOpcode):
    # "sipush": opcode followed by a signed 16-bit immediate value.
    OPSIZE = 3
    def createFields(self):
        yield UInt8(self, "opcode")
        yield Int16(self, "value")
    def createDisplay(self):
        return "%s(%s)"%(self.op, self["value"].value)
+
class OpcodeSpecial_iinc(JavaOpcode):
    # "iinc": opcode followed by an 8-bit index and a signed 8-bit constant.
    OPSIZE = 3
    def createFields(self):
        yield UInt8(self, "opcode")
        yield UInt8(self, "index")
        yield Int8(self, "value")
    def createDisplay(self):
        return "%s(%i,%i)"%(self.op, self["index"].value, self["value"].value)
+
class OpcodeSpecial_wide(JavaOpcode):
    """
    "wide" opcode: widens the operand of the following opcode to 16 bits
    (and, for "iinc", adds a 16-bit increment value).  Variable size.
    """
    def createFields(self):
        yield UInt8(self, "opcode")
        new_op = UInt8(self, "new_opcode")
        yield new_op
        # Resolve the widened opcode's name and use it as the field description.
        op = new_op._description = JavaBytecode.OPCODE_TABLE.get(new_op.value, ["reserved", None, "Reserved"])[0]
        yield UInt16(self, "index")
        if op == "iinc":
            yield Int16(self, "value")
            # BUG FIX: the lambda is stored as an *instance* attribute, so it
            # is invoked without an implicit self argument; the previous
            # "lambda self: ..." raised TypeError when called.  Close over
            # self instead.
            self.createDisplay = lambda: "%s(%i,%i)"%(self.op, self["index"].value, self["value"].value)
        else:
            self.createDisplay = lambda: "%s(%i)"%(self.op, self["index"].value)
+
class OpcodeSpecial_invokeinterface(JavaOpcode):
    # "invokeinterface": constant pool index, a count byte and a byte that
    # must be zero.
    OPSIZE = 5
    def createFields(self):
        yield UInt8(self, "opcode")
        yield CPIndex(self, "index")
        yield UInt8(self, "count")
        yield UInt8(self, "zero", "Must be zero.")
    def createDisplay(self):
        return "%s(%i,%i,%i)"%(self.op, self["index"].value, self["count"].value, self["zero"].value)
+
class OpcodeSpecial_newarray(JavaOpcode):
    # "newarray": the operand byte selects the primitive element type.
    OPSIZE = 2
    def createFields(self):
        yield UInt8(self, "opcode")
        yield Enum(UInt8(self, "atype"), {4: "boolean",
                                           5: "char",
                                           6: "float",
                                           7: "double",
                                           8: "byte",
                                           9: "short",
                                           10:"int",
                                           11:"long"})
    def createDisplay(self):
        # NOTE(review): calls createDisplay() directly instead of the usual
        # .display accessor -- confirm this is intentional.
        return "%s(%s)"%(self.op, self["atype"].createDisplay())
+
class OpcodeSpecial_multianewarray(JavaOpcode):
    # "multianewarray": constant pool index plus a dimension count byte.
    OPSIZE = 4
    def createFields(self):
        yield UInt8(self, "opcode")
        yield CPIndex(self, "index")
        yield UInt8(self, "dimensions")
    def createDisplay(self):
        return "%s(%i,%i)"%(self.op, self["index"].value, self["dimensions"].value)
+
class OpcodeSpecial_tableswitch(JavaOpcode):
    # "tableswitch": variable-sized jump table.  After the opcode comes
    # padding to a 32-bit boundary, a default offset, the low and high case
    # bounds, then (high - low + 1) jump offsets.
    def createFields(self):
        yield UInt8(self, "opcode")
        pad = paddingSize(self.address+8, 32)
        if pad:
            yield NullBits(self, "padding", pad)
        yield Int32(self, "default")
        low = Int32(self, "low")
        yield low
        high = Int32(self, "high")
        yield high
        for i in range(high.value-low.value+1):
            yield Int32(self, "offset[]")
    def createDisplay(self):
        return "%s(%i,%i,%i,...)"%(self.op, self["default"].value, self["low"].value, self["high"].value)
+
class OpcodeSpecial_lookupswitch(JavaOpcode):
    # "lookupswitch": variable-sized sparse jump table.  After the opcode
    # comes padding to a 32-bit boundary, a default offset, a pair count,
    # then that many (match, offset) pairs.
    def createFields(self):
        yield UInt8(self, "opcode")
        pad = paddingSize(self.address+8, 32)
        if pad:
            yield NullBits(self, "padding", pad)
        yield Int32(self, "default")
        n = Int32(self, "npairs")
        yield n
        for i in range(n.value):
            yield Int32(self, "match[]")
            yield Int32(self, "offset[]")
    def createDisplay(self):
        return "%s(%i,%i,...)"%(self.op, self["default"].value, self["npairs"].value)
+
class JavaBytecode(FieldSet):
    """
    A sequence of JVM instructions, parsed one opcode at a time.

    OPCODE_TABLE maps each opcode byte to (mnemonic, parser class,
    description) as defined by the JVM specification instruction set.
    """
    OPCODE_TABLE = {
0x00: ("nop", OpcodeNoArgs, "performs no operation. Stack: [No change]"),
0x01: ("aconst_null", OpcodeNoArgs, "pushes a 'null' reference onto the stack. Stack: -> null"),
0x02: ("iconst_m1", OpcodeNoArgs, "loads the int value -1 onto the stack. Stack: -> -1"),
0x03: ("iconst_0", OpcodeNoArgs, "loads the int value 0 onto the stack. Stack: -> 0"),
0x04: ("iconst_1", OpcodeNoArgs, "loads the int value 1 onto the stack. Stack: -> 1"),
0x05: ("iconst_2", OpcodeNoArgs, "loads the int value 2 onto the stack. Stack: -> 2"),
0x06: ("iconst_3", OpcodeNoArgs, "loads the int value 3 onto the stack. Stack: -> 3"),
0x07: ("iconst_4", OpcodeNoArgs, "loads the int value 4 onto the stack. Stack: -> 4"),
0x08: ("iconst_5", OpcodeNoArgs, "loads the int value 5 onto the stack. Stack: -> 5"),
0x09: ("lconst_0", OpcodeNoArgs, "pushes the long 0 onto the stack. Stack: -> 0L"),
0x0a: ("lconst_1", OpcodeNoArgs, "pushes the long 1 onto the stack. Stack: -> 1L"),
0x0b: ("fconst_0", OpcodeNoArgs, "pushes '0.0f' onto the stack. Stack: -> 0.0f"),
0x0c: ("fconst_1", OpcodeNoArgs, "pushes '1.0f' onto the stack. Stack: -> 1.0f"),
0x0d: ("fconst_2", OpcodeNoArgs, "pushes '2.0f' onto the stack. Stack: -> 2.0f"),
0x0e: ("dconst_0", OpcodeNoArgs, "pushes the constant '0.0' onto the stack. Stack: -> 0.0"),
0x0f: ("dconst_1", OpcodeNoArgs, "pushes the constant '1.0' onto the stack. Stack: -> 1.0"),
0x10: ("bipush", OpcodeSpecial_bipush, "pushes the signed 8-bit integer argument onto the stack. Stack: -> value"),
0x11: ("sipush", OpcodeSpecial_sipush, "pushes the signed 16-bit integer argument onto the stack. Stack: -> value"),
0x12: ("ldc", OpcodeCPIndexShort, "pushes a constant from a constant pool (String, int, float or class type) onto the stack. Stack: -> value"),
0x13: ("ldc_w", OpcodeCPIndex, "pushes a constant from a constant pool (String, int, float or class type) onto the stack. Stack: -> value"),
0x14: ("ldc2_w", OpcodeCPIndex, "pushes a constant from a constant pool (double or long) onto the stack. Stack: -> value"),
0x15: ("iload", OpcodeIndex, "loads an int 'value' from a local variable '#index'. Stack: -> value"),
0x16: ("lload", OpcodeIndex, "loads a long value from a local variable '#index'. Stack: -> value"),
0x17: ("fload", OpcodeIndex, "loads a float 'value' from a local variable '#index'. Stack: -> value"),
0x18: ("dload", OpcodeIndex, "loads a double 'value' from a local variable '#index'. Stack: -> value"),
0x19: ("aload", OpcodeIndex, "loads a reference onto the stack from a local variable '#index'. Stack: -> objectref"),
0x1a: ("iload_0", OpcodeNoArgs, "loads an int 'value' from variable 0. Stack: -> value"),
0x1b: ("iload_1", OpcodeNoArgs, "loads an int 'value' from variable 1. Stack: -> value"),
0x1c: ("iload_2", OpcodeNoArgs, "loads an int 'value' from variable 2. Stack: -> value"),
0x1d: ("iload_3", OpcodeNoArgs, "loads an int 'value' from variable 3. Stack: -> value"),
0x1e: ("lload_0", OpcodeNoArgs, "load a long value from a local variable 0. Stack: -> value"),
0x1f: ("lload_1", OpcodeNoArgs, "load a long value from a local variable 1. Stack: -> value"),
0x20: ("lload_2", OpcodeNoArgs, "load a long value from a local variable 2. Stack: -> value"),
0x21: ("lload_3", OpcodeNoArgs, "load a long value from a local variable 3. Stack: -> value"),
0x22: ("fload_0", OpcodeNoArgs, "loads a float 'value' from local variable 0. Stack: -> value"),
0x23: ("fload_1", OpcodeNoArgs, "loads a float 'value' from local variable 1. Stack: -> value"),
0x24: ("fload_2", OpcodeNoArgs, "loads a float 'value' from local variable 2. Stack: -> value"),
0x25: ("fload_3", OpcodeNoArgs, "loads a float 'value' from local variable 3. Stack: -> value"),
0x26: ("dload_0", OpcodeNoArgs, "loads a double from local variable 0. Stack: -> value"),
0x27: ("dload_1", OpcodeNoArgs, "loads a double from local variable 1. Stack: -> value"),
0x28: ("dload_2", OpcodeNoArgs, "loads a double from local variable 2. Stack: -> value"),
0x29: ("dload_3", OpcodeNoArgs, "loads a double from local variable 3. Stack: -> value"),
0x2a: ("aload_0", OpcodeNoArgs, "loads a reference onto the stack from local variable 0. Stack: -> objectref"),
0x2b: ("aload_1", OpcodeNoArgs, "loads a reference onto the stack from local variable 1. Stack: -> objectref"),
0x2c: ("aload_2", OpcodeNoArgs, "loads a reference onto the stack from local variable 2. Stack: -> objectref"),
0x2d: ("aload_3", OpcodeNoArgs, "loads a reference onto the stack from local variable 3. Stack: -> objectref"),
0x2e: ("iaload", OpcodeNoArgs, "loads an int from an array. Stack: arrayref, index -> value"),
0x2f: ("laload", OpcodeNoArgs, "load a long from an array. Stack: arrayref, index -> value"),
0x30: ("faload", OpcodeNoArgs, "loads a float from an array. Stack: arrayref, index -> value"),
0x31: ("daload", OpcodeNoArgs, "loads a double from an array. Stack: arrayref, index -> value"),
0x32: ("aaload", OpcodeNoArgs, "loads onto the stack a reference from an array. Stack: arrayref, index -> value"),
0x33: ("baload", OpcodeNoArgs, "loads a byte or Boolean value from an array. Stack: arrayref, index -> value"),
0x34: ("caload", OpcodeNoArgs, "loads a char from an array. Stack: arrayref, index -> value"),
0x35: ("saload", OpcodeNoArgs, "load short from array. Stack: arrayref, index -> value"),
0x36: ("istore", OpcodeIndex, "store int 'value' into variable '#index'. Stack: value ->"),
0x37: ("lstore", OpcodeIndex, "store a long 'value' in a local variable '#index'. Stack: value ->"),
0x38: ("fstore", OpcodeIndex, "stores a float 'value' into a local variable '#index'. Stack: value ->"),
0x39: ("dstore", OpcodeIndex, "stores a double 'value' into a local variable '#index'. Stack: value ->"),
0x3a: ("astore", OpcodeIndex, "stores a reference into a local variable '#index'. Stack: objectref ->"),
0x3b: ("istore_0", OpcodeNoArgs, "store int 'value' into variable 0. Stack: value ->"),
0x3c: ("istore_1", OpcodeNoArgs, "store int 'value' into variable 1. Stack: value ->"),
0x3d: ("istore_2", OpcodeNoArgs, "store int 'value' into variable 2. Stack: value ->"),
0x3e: ("istore_3", OpcodeNoArgs, "store int 'value' into variable 3. Stack: value ->"),
0x3f: ("lstore_0", OpcodeNoArgs, "store a long 'value' in a local variable 0. Stack: value ->"),
0x40: ("lstore_1", OpcodeNoArgs, "store a long 'value' in a local variable 1. Stack: value ->"),
0x41: ("lstore_2", OpcodeNoArgs, "store a long 'value' in a local variable 2. Stack: value ->"),
0x42: ("lstore_3", OpcodeNoArgs, "store a long 'value' in a local variable 3. Stack: value ->"),
0x43: ("fstore_0", OpcodeNoArgs, "stores a float 'value' into local variable 0. Stack: value ->"),
0x44: ("fstore_1", OpcodeNoArgs, "stores a float 'value' into local variable 1. Stack: value ->"),
0x45: ("fstore_2", OpcodeNoArgs, "stores a float 'value' into local variable 2. Stack: value ->"),
0x46: ("fstore_3", OpcodeNoArgs, "stores a float 'value' into local variable 3. Stack: value ->"),
0x47: ("dstore_0", OpcodeNoArgs, "stores a double into local variable 0. Stack: value ->"),
0x48: ("dstore_1", OpcodeNoArgs, "stores a double into local variable 1. Stack: value ->"),
0x49: ("dstore_2", OpcodeNoArgs, "stores a double into local variable 2. Stack: value ->"),
0x4a: ("dstore_3", OpcodeNoArgs, "stores a double into local variable 3. Stack: value ->"),
0x4b: ("astore_0", OpcodeNoArgs, "stores a reference into local variable 0. Stack: objectref ->"),
0x4c: ("astore_1", OpcodeNoArgs, "stores a reference into local variable 1. Stack: objectref ->"),
0x4d: ("astore_2", OpcodeNoArgs, "stores a reference into local variable 2. Stack: objectref ->"),
0x4e: ("astore_3", OpcodeNoArgs, "stores a reference into local variable 3. Stack: objectref ->"),
0x4f: ("iastore", OpcodeNoArgs, "stores an int into an array. Stack: arrayref, index, value ->"),
0x50: ("lastore", OpcodeNoArgs, "store a long to an array. Stack: arrayref, index, value ->"),
0x51: ("fastore", OpcodeNoArgs, "stores a float in an array. Stack: arrayref, index, value ->"),
0x52: ("dastore", OpcodeNoArgs, "stores a double into an array. Stack: arrayref, index, value ->"),
0x53: ("aastore", OpcodeNoArgs, "stores into a reference to an array. Stack: arrayref, index, value ->"),
0x54: ("bastore", OpcodeNoArgs, "stores a byte or Boolean value into an array. Stack: arrayref, index, value ->"),
0x55: ("castore", OpcodeNoArgs, "stores a char into an array. Stack: arrayref, index, value ->"),
0x56: ("sastore", OpcodeNoArgs, "store short to array. Stack: arrayref, index, value ->"),
0x57: ("pop", OpcodeNoArgs, "discards the top value on the stack. Stack: value ->"),
0x58: ("pop2", OpcodeNoArgs, "discards the top two values on the stack (or one value, if it is a double or long). Stack: {value2, value1} ->"),
0x59: ("dup", OpcodeNoArgs, "duplicates the value on top of the stack. Stack: value -> value, value"),
0x5a: ("dup_x1", OpcodeNoArgs, "inserts a copy of the top value into the stack two values from the top. Stack: value2, value1 -> value1, value2, value1"),
0x5b: ("dup_x2", OpcodeNoArgs, "inserts a copy of the top value into the stack two (if value2 is double or long it takes up the entry of value3, too) or three values (if value2 is neither double nor long) from the top. Stack: value3, value2, value1 -> value1, value3, value2, value1"),
0x5c: ("dup2", OpcodeNoArgs, "duplicate top two stack words (two values, if value1 is not double nor long; a single value, if value1 is double or long). Stack: {value2, value1} -> {value2, value1}, {value2, value1}"),
0x5d: ("dup2_x1", OpcodeNoArgs, "duplicate two words and insert beneath third word. Stack: value3, {value2, value1} -> {value2, value1}, value3, {value2, value1}"),
0x5e: ("dup2_x2", OpcodeNoArgs, "duplicate two words and insert beneath fourth word. Stack: {value4, value3}, {value2, value1} -> {value2, value1}, {value4, value3}, {value2, value1}"),
0x5f: ("swap", OpcodeNoArgs, "swaps two top words on the stack (note that value1 and value2 must not be double or long). Stack: value2, value1 -> value1, value2"),
0x60: ("iadd", OpcodeNoArgs, "adds two ints together. Stack: value1, value2 -> result"),
0x61: ("ladd", OpcodeNoArgs, "add two longs. Stack: value1, value2 -> result"),
0x62: ("fadd", OpcodeNoArgs, "adds two floats. Stack: value1, value2 -> result"),
0x63: ("dadd", OpcodeNoArgs, "adds two doubles. Stack: value1, value2 -> result"),
0x64: ("isub", OpcodeNoArgs, "int subtract. Stack: value1, value2 -> result"),
0x65: ("lsub", OpcodeNoArgs, "subtract two longs. Stack: value1, value2 -> result"),
0x66: ("fsub", OpcodeNoArgs, "subtracts two floats. Stack: value1, value2 -> result"),
0x67: ("dsub", OpcodeNoArgs, "subtracts a double from another. Stack: value1, value2 -> result"),
0x68: ("imul", OpcodeNoArgs, "multiply two integers. Stack: value1, value2 -> result"),
0x69: ("lmul", OpcodeNoArgs, "multiplies two longs. Stack: value1, value2 -> result"),
0x6a: ("fmul", OpcodeNoArgs, "multiplies two floats. Stack: value1, value2 -> result"),
0x6b: ("dmul", OpcodeNoArgs, "multiplies two doubles. Stack: value1, value2 -> result"),
0x6c: ("idiv", OpcodeNoArgs, "divides two integers. Stack: value1, value2 -> result"),
0x6d: ("ldiv", OpcodeNoArgs, "divide two longs. Stack: value1, value2 -> result"),
0x6e: ("fdiv", OpcodeNoArgs, "divides two floats. Stack: value1, value2 -> result"),
0x6f: ("ddiv", OpcodeNoArgs, "divides two doubles. Stack: value1, value2 -> result"),
0x70: ("irem", OpcodeNoArgs, "logical int remainder. Stack: value1, value2 -> result"),
0x71: ("lrem", OpcodeNoArgs, "remainder of division of two longs. Stack: value1, value2 -> result"),
0x72: ("frem", OpcodeNoArgs, "gets the remainder from a division between two floats. Stack: value1, value2 -> result"),
0x73: ("drem", OpcodeNoArgs, "gets the remainder from a division between two doubles. Stack: value1, value2 -> result"),
0x74: ("ineg", OpcodeNoArgs, "negate int. Stack: value -> result"),
0x75: ("lneg", OpcodeNoArgs, "negates a long. Stack: value -> result"),
0x76: ("fneg", OpcodeNoArgs, "negates a float. Stack: value -> result"),
0x77: ("dneg", OpcodeNoArgs, "negates a double. Stack: value -> result"),
0x78: ("ishl", OpcodeNoArgs, "int shift left. Stack: value1, value2 -> result"),
0x79: ("lshl", OpcodeNoArgs, "bitwise shift left of a long 'value1' by 'value2' positions. Stack: value1, value2 -> result"),
0x7a: ("ishr", OpcodeNoArgs, "int shift right. Stack: value1, value2 -> result"),
0x7b: ("lshr", OpcodeNoArgs, "bitwise shift right of a long 'value1' by 'value2' positions. Stack: value1, value2 -> result"),
0x7c: ("iushr", OpcodeNoArgs, "int shift right. Stack: value1, value2 -> result"),
0x7d: ("lushr", OpcodeNoArgs, "bitwise shift right of a long 'value1' by 'value2' positions, unsigned. Stack: value1, value2 -> result"),
0x7e: ("iand", OpcodeNoArgs, "performs a logical and on two integers. Stack: value1, value2 -> result"),
0x7f: ("land", OpcodeNoArgs, "bitwise and of two longs. Stack: value1, value2 -> result"),
0x80: ("ior", OpcodeNoArgs, "logical int or. Stack: value1, value2 -> result"),
0x81: ("lor", OpcodeNoArgs, "bitwise or of two longs. Stack: value1, value2 -> result"),
0x82: ("ixor", OpcodeNoArgs, "int xor. Stack: value1, value2 -> result"),
0x83: ("lxor", OpcodeNoArgs, "bitwise exclusive or of two longs. Stack: value1, value2 -> result"),
0x84: ("iinc", OpcodeSpecial_iinc, "increment local variable '#index' by signed byte 'const'. Stack: [No change]"),
0x85: ("i2l", OpcodeNoArgs, "converts an int into a long. Stack: value -> result"),
0x86: ("i2f", OpcodeNoArgs, "converts an int into a float. Stack: value -> result"),
0x87: ("i2d", OpcodeNoArgs, "converts an int into a double. Stack: value -> result"),
0x88: ("l2i", OpcodeNoArgs, "converts a long to an int. Stack: value -> result"),
0x89: ("l2f", OpcodeNoArgs, "converts a long to a float. Stack: value -> result"),
0x8a: ("l2d", OpcodeNoArgs, "converts a long to a double. Stack: value -> result"),
0x8b: ("f2i", OpcodeNoArgs, "converts a float to an int. Stack: value -> result"),
0x8c: ("f2l", OpcodeNoArgs, "converts a float to a long. Stack: value -> result"),
0x8d: ("f2d", OpcodeNoArgs, "converts a float to a double. Stack: value -> result"),
0x8e: ("d2i", OpcodeNoArgs, "converts a double to an int. Stack: value -> result"),
0x8f: ("d2l", OpcodeNoArgs, "converts a double to a long. Stack: value -> result"),
0x90: ("d2f", OpcodeNoArgs, "converts a double to a float. Stack: value -> result"),
0x91: ("i2b", OpcodeNoArgs, "converts an int into a byte. Stack: value -> result"),
0x92: ("i2c", OpcodeNoArgs, "converts an int into a character. Stack: value -> result"),
0x93: ("i2s", OpcodeNoArgs, "converts an int into a short. Stack: value -> result"),
0x94: ("lcmp", OpcodeNoArgs, "compares two longs values. Stack: value1, value2 -> result"),
0x95: ("fcmpl", OpcodeNoArgs, "compares two floats. Stack: value1, value2 -> result"),
0x96: ("fcmpg", OpcodeNoArgs, "compares two floats. Stack: value1, value2 -> result"),
0x97: ("dcmpl", OpcodeNoArgs, "compares two doubles. Stack: value1, value2 -> result"),
0x98: ("dcmpg", OpcodeNoArgs, "compares two doubles. Stack: value1, value2 -> result"),
0x99: ("ifeq", OpcodeShortJump, "if 'value' is 0, branch to the 16-bit instruction offset argument. Stack: value ->"),
0x9a: ("ifne", OpcodeShortJump, "if 'value' is not 0, branch to the 16-bit instruction offset argument. Stack: value ->"),
# BUG FIX: opcode 0x9b (iflt) was missing from the table, so any 'iflt'
# instruction decoded as "<reserved_opcode>".
0x9b: ("iflt", OpcodeShortJump, "if 'value' is less than 0, branch to the 16-bit instruction offset argument. Stack: value ->"),
0x9c: ("ifge", OpcodeShortJump, "if 'value' is greater than or equal to 0, branch to the 16-bit instruction offset argument. Stack: value ->"),
0x9d: ("ifgt", OpcodeShortJump, "if 'value' is greater than 0, branch to the 16-bit instruction offset argument. Stack: value ->"),
0x9e: ("ifle", OpcodeShortJump, "if 'value' is less than or equal to 0, branch to the 16-bit instruction offset argument. Stack: value ->"),
0x9f: ("if_icmpeq", OpcodeShortJump, "if ints are equal, branch to the 16-bit instruction offset argument. Stack: value1, value2 ->"),
0xa0: ("if_icmpne", OpcodeShortJump, "if ints are not equal, branch to the 16-bit instruction offset argument. Stack: value1, value2 ->"),
0xa1: ("if_icmplt", OpcodeShortJump, "if 'value1' is less than 'value2', branch to the 16-bit instruction offset argument. Stack: value1, value2 ->"),
0xa2: ("if_icmpge", OpcodeShortJump, "if 'value1' is greater than or equal to 'value2', branch to the 16-bit instruction offset argument. Stack: value1, value2 ->"),
0xa3: ("if_icmpgt", OpcodeShortJump, "if 'value1' is greater than 'value2', branch to the 16-bit instruction offset argument. Stack: value1, value2 ->"),
0xa4: ("if_icmple", OpcodeShortJump, "if 'value1' is less than or equal to 'value2', branch to the 16-bit instruction offset argument. Stack: value1, value2 ->"),
0xa5: ("if_acmpeq", OpcodeShortJump, "if references are equal, branch to the 16-bit instruction offset argument. Stack: value1, value2 ->"),
0xa6: ("if_acmpne", OpcodeShortJump, "if references are not equal, branch to the 16-bit instruction offset argument. Stack: value1, value2 ->"),
0xa7: ("goto", OpcodeShortJump, "goes to the 16-bit instruction offset argument. Stack: [no change]"),
0xa8: ("jsr", OpcodeShortJump, "jump to subroutine at the 16-bit instruction offset argument and place the return address on the stack. Stack: -> address"),
0xa9: ("ret", OpcodeIndex, "continue execution from address taken from a local variable '#index'. Stack: [No change]"),
0xaa: ("tableswitch", OpcodeSpecial_tableswitch, "continue execution from an address in the table at offset 'index'. Stack: index ->"),
0xab: ("lookupswitch", OpcodeSpecial_lookupswitch, "a target address is looked up from a table using a key and execution continues from the instruction at that address. Stack: key ->"),
0xac: ("ireturn", OpcodeNoArgs, "returns an integer from a method. Stack: value -> [empty]"),
0xad: ("lreturn", OpcodeNoArgs, "returns a long value. Stack: value -> [empty]"),
0xae: ("freturn", OpcodeNoArgs, "returns a float. Stack: value -> [empty]"),
0xaf: ("dreturn", OpcodeNoArgs, "returns a double from a method. Stack: value -> [empty]"),
0xb0: ("areturn", OpcodeNoArgs, "returns a reference from a method. Stack: objectref -> [empty]"),
0xb1: ("return", OpcodeNoArgs, "return void from method. Stack: -> [empty]"),
0xb2: ("getstatic", OpcodeCPIndex, "gets a static field 'value' of a class, where the field is identified by field reference in the constant pool. Stack: -> value"),
0xb3: ("putstatic", OpcodeCPIndex, "set static field to 'value' in a class, where the field is identified by a field reference in constant pool. Stack: value ->"),
0xb4: ("getfield", OpcodeCPIndex, "gets a field 'value' of an object 'objectref', where the field is identified by field reference <argument> in the constant pool. Stack: objectref -> value"),
0xb5: ("putfield", OpcodeCPIndex, "set field to 'value' in an object 'objectref', where the field is identified by a field reference <argument> in constant pool. Stack: objectref, value ->"),
0xb6: ("invokevirtual", OpcodeCPIndex, "invoke virtual method on object 'objectref', where the method is identified by method reference <argument> in constant pool. Stack: objectref, [arg1, arg2, ...] ->"),
0xb7: ("invokespecial", OpcodeCPIndex, "invoke instance method on object 'objectref', where the method is identified by method reference <argument> in constant pool. Stack: objectref, [arg1, arg2, ...] ->"),
0xb8: ("invokestatic", OpcodeCPIndex, "invoke a static method, where the method is identified by method reference <argument> in the constant pool. Stack: [arg1, arg2, ...] ->"),
0xb9: ("invokeinterface", OpcodeSpecial_invokeinterface, "invokes an interface method on object 'objectref', where the interface method is identified by method reference <argument> in constant pool. Stack: objectref, [arg1, arg2, ...] ->"),
0xba: ("xxxunusedxxx", OpcodeNoArgs, "this opcode is reserved for historical reasons. Stack: "),
0xbb: ("new", OpcodeCPIndex, "creates new object of type identified by class reference <argument> in constant pool. Stack: -> objectref"),
0xbc: ("newarray", OpcodeSpecial_newarray, "creates new array with 'count' elements of primitive type given in the argument. Stack: count -> arrayref"),
0xbd: ("anewarray", OpcodeCPIndex, "creates a new array of references of length 'count' and component type identified by the class reference <argument> in the constant pool. Stack: count -> arrayref"),
0xbe: ("arraylength", OpcodeNoArgs, "gets the length of an array. Stack: arrayref -> length"),
0xbf: ("athrow", OpcodeNoArgs, "throws an error or exception (notice that the rest of the stack is cleared, leaving only a reference to the Throwable). Stack: objectref -> [empty], objectref"),
0xc0: ("checkcast", OpcodeCPIndex, "checks whether an 'objectref' is of a certain type, the class reference of which is in the constant pool. Stack: objectref -> objectref"),
0xc1: ("instanceof", OpcodeCPIndex, "determines if an object 'objectref' is of a given type, identified by class reference <argument> in constant pool. Stack: objectref -> result"),
0xc2: ("monitorenter", OpcodeNoArgs, "enter monitor for object (\"grab the lock\" - start of synchronized() section). Stack: objectref -> "),
0xc3: ("monitorexit", OpcodeNoArgs, "exit monitor for object (\"release the lock\" - end of synchronized() section). Stack: objectref -> "),
0xc4: ("wide", OpcodeSpecial_wide, "execute 'opcode', where 'opcode' is either iload, fload, aload, lload, dload, istore, fstore, astore, lstore, dstore, or ret, but assume the 'index' is 16 bit; or execute iinc, where the 'index' is 16 bits and the constant to increment by is a signed 16 bit short. Stack: [same as for corresponding instructions]"),
0xc5: ("multianewarray", OpcodeSpecial_multianewarray, "create a new array of 'dimensions' dimensions with elements of type identified by class reference in constant pool; the sizes of each dimension is identified by 'count1', ['count2', etc]. Stack: count1, [count2,...] -> arrayref"),
0xc6: ("ifnull", OpcodeShortJump, "if 'value' is null, branch to the 16-bit instruction offset argument. Stack: value ->"),
0xc7: ("ifnonnull", OpcodeShortJump, "if 'value' is not null, branch to the 16-bit instruction offset argument. Stack: value ->"),
0xc8: ("goto_w", OpcodeLongJump, "goes to another instruction at the 32-bit branch offset argument. Stack: [no change]"),
0xc9: ("jsr_w", OpcodeLongJump, "jump to subroutine at the 32-bit branch offset argument and place the return address on the stack. Stack: -> address"),
0xca: ("breakpoint", OpcodeNoArgs, "reserved for breakpoints in Java debuggers; should not appear in any class file."),
0xfe: ("impdep1", OpcodeNoArgs, "reserved for implementation-dependent operations within debuggers; should not appear in any class file."),
0xff: ("impdep2", OpcodeNoArgs, "reserved for implementation-dependent operations within debuggers; should not appear in any class file.")}

    def __init__(self, parent, name, length):
        """length is the bytecode size in bytes (from the Code attribute)."""
        FieldSet.__init__(self, parent, name)
        self._size = length*8

    def createFields(self):
        # Peek each opcode byte to pick the right parser class; unknown
        # bytes fall back to a generic "<reserved_opcode>" entry.
        while self.current_size < self.size:
            bytecode = ord(self.parent.stream.readBytes(self.absolute_address+self.current_size, 1))
            op, cls, desc = self.OPCODE_TABLE.get(bytecode,
                ("<reserved_opcode>", OpcodeNoArgs, "Reserved opcode."))
            yield cls(self, "bytecode[]", op, desc)
+
+###############################################################################
class CPInfo(FieldSet):
    """
    Holds a constant pool entry.  Entries all have a type, and various contents
    fields depending on their type.
    """
    def createFields(self):
        yield Enum(UInt8(self, "tag"), self.root.CONSTANT_TYPES)
        if self["tag"].value not in self.root.CONSTANT_TYPES:
            raise ParserError("Java: unknown constant type (%s)" % self["tag"].value)
        self.constant_type = self.root.CONSTANT_TYPES[self["tag"].value]
        if self.constant_type == "Utf8":
            yield PascalString16(self, "bytes", charset="UTF-8")
        elif self.constant_type == "Integer":
            yield Int32(self, "bytes")
        elif self.constant_type == "Float":
            yield Float32(self, "bytes")
        elif self.constant_type == "Long":
            yield Int64(self, "bytes")
        elif self.constant_type == "Double":
            yield Float64(self, "bytes")
        elif self.constant_type == "Class":
            yield CPIndex(self, "name_index", "Class or interface name", target_types="Utf8")
        elif self.constant_type == "String":
            yield CPIndex(self, "string_index", target_types="Utf8")
        elif self.constant_type == "Fieldref":
            yield CPIndex(self, "class_index", "Field class or interface name", target_types="Class")
            yield CPIndex(self, "name_and_type_index", target_types="NameAndType")
        elif self.constant_type == "Methodref":
            yield CPIndex(self, "class_index", "Method class name", target_types="Class")
            yield CPIndex(self, "name_and_type_index", target_types="NameAndType")
        elif self.constant_type == "InterfaceMethodref":
            yield CPIndex(self, "class_index", "Method interface name", target_types="Class")
            yield CPIndex(self, "name_and_type_index", target_types="NameAndType")
        elif self.constant_type == "NameAndType":
            yield CPIndex(self, "name_index", target_types="Utf8")
            yield CPIndex(self, "descriptor_index", target_types="Utf8")
        else:
            # BUG FIX: was str + int concatenation, which raised TypeError
            # instead of the intended ParserError when this branch was hit.
            raise ParserError("Not a valid constant pool element type: %s"
                    % self["tag"].value)

    def __str__(self):
        """
        Returns a human-readable string representation of the constant pool
        entry.  It is used for pretty-printing of the CPIndex fields pointing
        to it.
        """
        if self.constant_type == "Utf8":
            return self["bytes"].value
        elif self.constant_type in ("Integer", "Float", "Long", "Double"):
            return self["bytes"].display
        elif self.constant_type == "Class":
            class_name = str(self["name_index"].get_cp_entry())
            return class_name.replace("/",".")
        elif self.constant_type == "String":
            return str(self["string_index"].get_cp_entry())
        elif self.constant_type in ("Fieldref", "Methodref", "InterfaceMethodref"):
            # The three *ref entries share the same layout.
            return "%s (from %s)" % (self["name_and_type_index"], self["class_index"])
        elif self.constant_type == "NameAndType":
            return parse_any_descriptor(
                    str(self["descriptor_index"].get_cp_entry()),
                    name=str(self["name_index"].get_cp_entry()))
        else:
            # BUG FIX: was str + int concatenation (TypeError); also note the
            # FIXME about returning "<error>" instead of raising.
            raise ParserError("Not a valid constant pool element type: %s"
                    % self["tag"].value)
+
+
+###############################################################################
+# field_info {
+#        u2 access_flags;
+#        u2 name_index;
+#        u2 descriptor_index;
+#        u2 attributes_count;
+#        attribute_info attributes[attributes_count];
+# }
class FieldInfo(FieldSet):
    def createFields(self):
        # Access flags (16 bits), most significant bits first.
        yield NullBits(self, "reserved[]", 8)
        for flag in ("transient", "volatile"):
            yield Bit(self, flag)
        yield NullBits(self, "reserved[]", 1)
        for flag in ("final", "static", "protected", "private", "public"):
            yield Bit(self, flag)

        yield CPIndex(self, "name_index", "Field name", target_types="Utf8")
        yield CPIndex(self, "descriptor_index", "Field descriptor",
                      target_types="Utf8",
                      target_text_handler=parse_field_descriptor)
        count = UInt16(self, "attributes_count", "Number of field attributes")
        yield count
        if count.value:
            yield FieldArray(self, "attributes", AttributeInfo, count.value)
+
+
+###############################################################################
+# method_info {
+#        u2 access_flags;
+#        u2 name_index;
+#        u2 descriptor_index;
+#        u2 attributes_count;
+#        attribute_info attributes[attributes_count];
+# }
class MethodInfo(FieldSet):
    def createFields(self):
        # Access flags (16 bits), most significant bits first.
        yield NullBits(self, "reserved[]", 4)
        for flag in ("strict", "abstract"):
            yield Bit(self, flag)
        yield NullBits(self, "reserved[]", 1)
        yield Bit(self, "native")
        yield NullBits(self, "reserved[]", 2)
        for flag in ("synchronized", "final", "static",
                     "protected", "private", "public"):
            yield Bit(self, flag)

        yield CPIndex(self, "name_index", "Method name", target_types="Utf8")
        yield CPIndex(self, "descriptor_index", "Method descriptor",
                      target_types="Utf8",
                      target_text_handler=parse_method_descriptor)
        count = UInt16(self, "attributes_count", "Number of method attributes")
        yield count
        if count.value:
            yield FieldArray(self, "attributes", AttributeInfo, count.value)
+
+
+###############################################################################
+# attribute_info {
+#        u2 attribute_name_index;
+#        u4 attribute_length;
+#        u1 info[attribute_length];
+# }
+# [...]
class AttributeInfo(FieldSet):
    """
    attribute_info structure of the Java class file format (JVM spec 4.7).

    Generic layout:
        u2 attribute_name_index;
        u4 attribute_length;
        u1 info[attribute_length];

    The attribute name (resolved through the constant pool) selects the
    concrete layout parsed below; unknown attribute types are kept as raw
    bytes, as allowed by the JVM specification.
    """
    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        # attribute_length does not include the 6-byte attribute header
        # itself (u2 attribute_name_index + u4 attribute_length).
        self._size = (self["attribute_length"].value + 6) * 8

    def _checkLength(self, expected):
        """
        Raise ParserError if attribute_length differs from the size implied
        by the attribute's fixed layout.  (Previously done with bare
        asserts, which are skipped under "python -O"; raise ParserError
        like the ConstantValue branch always did.)
        """
        if self["attribute_length"].value != expected:
            raise ParserError("Java: Invalid attribute %s length (%s)" \
                % (self.path, self["attribute_length"].value))

    def createFields(self):
        yield CPIndex(self, "attribute_name_index", "Attribute name", target_types="Utf8")
        yield UInt32(self, "attribute_length", "Length of the attribute")
        attr_name = str(self["attribute_name_index"].get_cp_entry())

        # ConstantValue_attribute {
        #   u2 attribute_name_index;
        #   u4 attribute_length;
        #   u2 constantvalue_index;
        # }
        if attr_name == "ConstantValue":
            self._checkLength(2)
            yield CPIndex(self, "constantvalue_index",
                    target_types=("Long","Float","Double","Integer","String"))

        # Code_attribute {
        #   u2 attribute_name_index;
        #   u4 attribute_length;
        #   u2 max_stack;
        #   u2 max_locals;
        #   u4 code_length;
        #   u1 code[code_length];
        #   u2 exception_table_length;
        #   {   u2 start_pc;
        #       u2 end_pc;
        #       u2 handler_pc;
        #       u2 catch_type;
        #   } exception_table[exception_table_length];
        #   u2 attributes_count;
        #   attribute_info attributes[attributes_count];
        # }
        elif attr_name == "Code":
            yield UInt16(self, "max_stack")
            yield UInt16(self, "max_locals")
            yield UInt32(self, "code_length")
            if self["code_length"].value > 0:
                yield JavaBytecode(self, "code", self["code_length"].value)
            yield UInt16(self, "exception_table_length")
            if self["exception_table_length"].value > 0:
                yield FieldArray(self, "exception_table", ExceptionTableEntry,
                        self["exception_table_length"].value)
            yield UInt16(self, "attributes_count")
            if self["attributes_count"].value > 0:
                yield FieldArray(self, "attributes", AttributeInfo,
                        self["attributes_count"].value)

        # Exceptions_attribute {
        #   u2 attribute_name_index;
        #   u4 attribute_length;
        #   u2 number_of_exceptions;
        #   u2 exception_index_table[number_of_exceptions];
        # }
        elif attr_name == "Exceptions":
            yield UInt16(self, "number_of_exceptions")
            yield FieldArray(self, "exception_index_table", CPIndex,
                    self["number_of_exceptions"].value, target_types="Class")
            # Length can only be checked once number_of_exceptions is parsed.
            self._checkLength(2 + self["number_of_exceptions"].value * 2)

        # InnerClasses_attribute {
        #   u2 attribute_name_index;
        #   u4 attribute_length;
        #   u2 number_of_classes;
        #   {   u2 inner_class_info_index;
        #       u2 outer_class_info_index;
        #       u2 inner_name_index;
        #       u2 inner_class_access_flags;
        #   } classes[number_of_classes];
        # }
        elif attr_name == "InnerClasses":
            yield UInt16(self, "number_of_classes")
            if self["number_of_classes"].value > 0:
                yield FieldArray(self, "classes", InnerClassesEntry,
                       self["number_of_classes"].value)
            self._checkLength(2 + self["number_of_classes"].value * 8)

        # Synthetic_attribute {
        #   u2 attribute_name_index;
        #   u4 attribute_length;
        # }
        elif attr_name == "Synthetic":
            self._checkLength(0)

        # SourceFile_attribute {
        #   u2 attribute_name_index;
        #   u4 attribute_length;
        #   u2 sourcefile_index;
        # }
        elif attr_name == "SourceFile":
            self._checkLength(2)
            yield CPIndex(self, "sourcefile_index", target_types="Utf8")

        # LineNumberTable_attribute {
        #   u2 attribute_name_index;
        #   u4 attribute_length;
        #   u2 line_number_table_length;
        #   {   u2 start_pc;
        #       u2 line_number;
        #   } line_number_table[line_number_table_length];
        # }
        elif attr_name == "LineNumberTable":
            yield UInt16(self, "line_number_table_length")
            if self["line_number_table_length"].value > 0:
                yield FieldArray(self, "line_number_table",
                        LineNumberTableEntry,
                        self["line_number_table_length"].value)
            self._checkLength(2 + self["line_number_table_length"].value * 4)

        # LocalVariableTable_attribute {
        #   u2 attribute_name_index;
        #   u4 attribute_length;
        #   u2 local_variable_table_length;
        #   {   u2 start_pc;
        #       u2 length;
        #       u2 name_index;
        #       u2 descriptor_index;
        #       u2 index;
        #   } local_variable_table[local_variable_table_length];
        # }
        elif attr_name == "LocalVariableTable":
            yield UInt16(self, "local_variable_table_length")
            if self["local_variable_table_length"].value > 0:
                yield FieldArray(self, "local_variable_table",
                        LocalVariableTableEntry,
                        self["local_variable_table_length"].value)
            self._checkLength(2 + self["local_variable_table_length"].value * 10)

        # Deprecated_attribute {
        #   u2 attribute_name_index;
        #   u4 attribute_length;
        # }
        elif attr_name == "Deprecated":
            self._checkLength(0)

        # Unknown attribute type.  They are allowed by the JVM specs, but we
        # can't say much about them...
        elif self["attribute_length"].value > 0:
            yield RawBytes(self, "info", self["attribute_length"].value)
+
class ExceptionTableEntry(FieldSet):
    """One entry of a Code attribute's exception_table (JVM spec 4.7.3)."""
    # Three 16-bit program counters plus one constant-pool index.
    static_size = 48 + CPIndex.static_size

    def createFields(self):
        # The program counters are displayed in hexadecimal.
        for pc_name in ("start_pc", "end_pc", "handler_pc"):
            yield textHandler(UInt16(self, pc_name), hexadecimal)
        yield CPIndex(self, "catch_type", target_types="Class")
+
class InnerClassesEntry(StaticFieldSet):
    """One entry of an InnerClasses attribute (JVM spec 4.7.5)."""
    format = (
        # A zero index means "no information" for these three
        # constant-pool references, hence allow_zero.
        (CPIndex, "inner_class_info_index",
                {"target_types": "Class", "allow_zero": True}),
        (CPIndex, "outer_class_info_index",
                {"target_types": "Class", "allow_zero": True}),
        (CPIndex, "inner_name_index",
                {"target_types": "Utf8", "allow_zero": True}),

        # Inner class access flags (16 bits)
        (NullBits, "reserved[]", 5),
        (Bit, "abstract"),
        (Bit, "interface"),
        (NullBits, "reserved[]", 3),
        (Bit, "super"),
        (Bit, "final"),
        (Bit, "static"),
        (Bit, "protected"),
        (Bit, "private"),
        (Bit, "public"),
    )
+
class LineNumberTableEntry(StaticFieldSet):
    """One (bytecode offset, source line) pair of a LineNumberTable attribute."""
    format = (
        (UInt16, "start_pc"),
        (UInt16, "line_number")
    )
+
class LocalVariableTableEntry(StaticFieldSet):
    """One entry of a LocalVariableTable debug attribute (JVM spec 4.7.9)."""
    format = (
        (UInt16, "start_pc"),
        (UInt16, "length"),
        (CPIndex, "name_index", {"target_types": "Utf8"}),
        # The descriptor is rendered as a human-readable Java type.
        (CPIndex, "descriptor_index", {"target_types": "Utf8",
                "target_text_handler": parse_field_descriptor}),
        (UInt16, "index")
    )
+
+
+###############################################################################
+# ClassFile {
+#        u4 magic;
+#        u2 minor_version;
+#        u2 major_version;
+#        u2 constant_pool_count;
+#        cp_info constant_pool[constant_pool_count-1];
+#        u2 access_flags;
+#        u2 this_class;
+#        u2 super_class;
+#        u2 interfaces_count;
+#        u2 interfaces[interfaces_count];
+#        u2 fields_count;
+#        field_info fields[fields_count];
+#        u2 methods_count;
+#        method_info methods[methods_count];
+#        u2 attributes_count;
+#        attribute_info attributes[attributes_count];
+# }
class JavaCompiledClassFile(Parser):
    """
    Root of the .class parser: the ClassFile structure (JVM spec chapter 4).
    """

    endian = BIG_ENDIAN

    PARSER_TAGS = {
        "id": "java_class",
        "category": "program",
        "file_ext": ("class",),
        "mime": (u"application/java-vm",),
        "min_size": (32 + 3*16),
        "description": "Compiled Java class"
    }

    MAGIC = 0xCAFEBABE
    KNOWN_VERSIONS = {
        "45.3": "JDK 1.1",
        "46.0": "JDK 1.2",
        "47.0": "JDK 1.3",
        "48.0": "JDK 1.4",
        "49.0": "JDK 1.5",
        "50.0": "JDK 1.6"
    }

    # Constants go here since they will probably depend on the detected format
    # version at some point.  Though, if they happen to be really backward
    # compatible, they may become module globals.
    CONSTANT_TYPES = {
         1: "Utf8",
         3: "Integer",
         4: "Float",
         5: "Long",
         6: "Double",
         7: "Class",
         8: "String",
         9: "Fieldref",
        10: "Methodref",
        11: "InterfaceMethodref",
        12: "NameAndType"
    }

    def _versionString(self):
        """Return the class file format version as a "major.minor" string."""
        return "%d.%d" % (self["major_version"].value, self["minor_version"].value)

    def validate(self):
        """Check the magic number and that the version is a known JDK release."""
        if self["magic"].value != self.MAGIC:
            return "Wrong magic signature!"
        version = self._versionString()
        if version not in self.KNOWN_VERSIONS:
            return "Unknown version (%s)" % version
        return True

    def createDescription(self):
        """Describe the file, naming the JDK release when the version is known."""
        version = self._versionString()
        if version in self.KNOWN_VERSIONS:
            return "Compiled Java class, %s" % self.KNOWN_VERSIONS[version]
        else:
            return "Compiled Java class, version %s" % version

    def createFields(self):
        yield textHandler(UInt32(self, "magic", "Java compiled class signature"),
            hexadecimal)
        yield UInt16(self, "minor_version", "Class format minor version")
        yield UInt16(self, "major_version", "Class format major version")
        yield UInt16(self, "constant_pool_count", "Size of the constant pool")
        if self["constant_pool_count"].value > 1:
            #yield FieldArray(self, "constant_pool", CPInfo,
            #        (self["constant_pool_count"].value - 1), first_index=1)
            # Mmmh... can't use FieldArray actually, because ConstantPool
            # requires some specific hacks (skipping some indexes after Long
            # and Double entries).
            yield ConstantPool(self, "constant_pool",
                    (self["constant_pool_count"].value))

        # Class access flags (16 bits) -- this is the ClassFile's own
        # access_flags word, not an inner-class one.
        yield NullBits(self, "reserved[]", 5)
        yield Bit(self, "abstract")
        yield Bit(self, "interface")
        yield NullBits(self, "reserved[]", 3)
        yield Bit(self, "super")
        yield Bit(self, "final")
        yield Bit(self, "static")
        yield Bit(self, "protected")
        yield Bit(self, "private")
        yield Bit(self, "public")

        yield CPIndex(self, "this_class", "Class name", target_types="Class")
        yield CPIndex(self, "super_class", "Super class name", target_types="Class")
        yield UInt16(self, "interfaces_count", "Number of implemented interfaces")
        if self["interfaces_count"].value > 0:
            yield FieldArray(self, "interfaces", CPIndex,
                    self["interfaces_count"].value, target_types="Class")
        yield UInt16(self, "fields_count", "Number of fields")
        if self["fields_count"].value > 0:
            yield FieldArray(self, "fields", FieldInfo,
                    self["fields_count"].value)
        yield UInt16(self, "methods_count", "Number of methods")
        if self["methods_count"].value > 0:
            yield FieldArray(self, "methods", MethodInfo,
                    self["methods_count"].value)
        yield UInt16(self, "attributes_count", "Number of attributes")
        if self["attributes_count"].value > 0:
            yield FieldArray(self, "attributes", AttributeInfo,
                    self["attributes_count"].value)
+
+# vim: set expandtab tabstop=4 shiftwidth=4 autoindent smartindent:
diff --git a/lib/hachoir_parser/program/prc.py b/lib/hachoir_parser/program/prc.py
new file mode 100644
index 0000000000000000000000000000000000000000..f4db0254ceb1da5a6ce6dd9a62955f212cd1e72b
--- /dev/null
+++ b/lib/hachoir_parser/program/prc.py
@@ -0,0 +1,82 @@
+"""
+PRC (Palm resource) parser.
+
+Author: Sebastien Ponce
+Creation date: 29 october 2008
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    UInt16, UInt32, TimestampMac32,
+    String, RawBytes)
+from hachoir_core.endian import BIG_ENDIAN
+
class PRCHeader(FieldSet):
    """Fixed 78-byte header of a Palm resource (PRC) file."""
    static_size = 78*8

    def createFields(self):
        yield String(self, "name", 32, "Name")
        yield UInt16(self, "flags", "Flags")
        yield UInt16(self, "version", "Version")
        # Three Mac-epoch timestamps.
        for ts_name, ts_desc in (
                ("create_time", "Creation time"),
                ("mod_time", "Modification time"),
                ("backup_time", "Backup time")):
            yield TimestampMac32(self, ts_name, ts_desc)
        # Remaining 32-bit bookkeeping words.
        for word_name, word_desc in (
                ("mod_num", "mod num"),
                ("app_info", "app info"),
                ("sort_info", "sort info"),
                ("type", "type"),
                ("id", "id"),
                ("unique_id_seed", "unique_id_seed"),
                ("next_record_list", "next_record_list")):
            yield UInt32(self, word_name, word_desc)
        yield UInt16(self, "num_records", "num_records")
+
class ResourceHeader(FieldSet):
    """10-byte PRC resource header: type name, id, absolute data offset."""
    static_size = 10*8

    def createFields(self):
        yield String(self, "name", 4, "Name of the resource")
        # NOTE(review): despite the field name, the description says this is
        # the resource's ID number -- confirm against the PRC format spec.
        yield UInt16(self, "flags", "ID number of the resource")
        yield UInt32(self, "offset", "Pointer to the resource data")

    def createDescription(self):
        # NOTE(review): formats the field object itself, not self["name"].value
        # -- presumably relies on the field's string conversion; confirm.
        return "Resource Header (%s)" % self["name"]
+
class PRCFile(Parser):
    """Parser for Palm Resource (PRC) files."""
    PARSER_TAGS = {
        "id": "prc",
        "category": "program",
        "file_ext": ("prc", ""),
        "min_size": ResourceHeader.static_size,  # At least one program header
        "mime": (
            u"application/x-pilot-prc",
            u"application/x-palmpilot"),
        "description": "Palm Resource File"
    }
    endian = BIG_ENDIAN

    def validate(self):
        # FIXME: Implement the validation function!
        return False

    def createFields(self):
        # File header followed by one header per resource.
        yield PRCHeader(self, "header", "Header")
        # Resource sizes are recovered from the gaps between consecutive
        # absolute offsets; the last resource runs to the end of the file.
        sizes = []
        prev_offset = 0
        for index in xrange(self["header/num_records"].value):
            res_header = ResourceHeader(self, "res_header[]")
            if index > 0:
                sizes.append(res_header["offset"].value - prev_offset)
            prev_offset = res_header["offset"].value
            yield res_header
        sizes.append(self.size/8 - prev_offset)
        yield UInt16(self, "placeholder", "Place holder bytes")
        for index, res_size in enumerate(sizes):
            yield RawBytes(self, "res[]", res_size,
                    '"%s" Resource' % self["res_header[%s]/name" % index].value)

    def createDescription(self):
        return "Palm Resource file"
+
diff --git a/lib/hachoir_parser/program/python.py b/lib/hachoir_parser/program/python.py
new file mode 100644
index 0000000000000000000000000000000000000000..6eea32bc7db1c611dde7b5f8a71cf633c657c703
--- /dev/null
+++ b/lib/hachoir_parser/program/python.py
@@ -0,0 +1,334 @@
+"""
+Python compiled source code parser.
+
Information:
+- Python 2.4.2 source code:
+  files Python/marshal.c and Python/import.c
+
+Author: Victor Stinner
+Creation: 25 march 2005
+"""
+
+DISASSEMBLE = False
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, UInt8,
+    UInt16, Int32, UInt32, Int64, ParserError, Float64, Enum,
+    Character, Bytes, RawBytes, PascalString8, TimestampUnix32)
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.bits import long2raw
+from hachoir_core.text_handler import textHandler, hexadecimal
+from hachoir_core.i18n import ngettext
if DISASSEMBLE:
    from dis import dis

    def disassembleBytecode(field):
        # Debug helper: dump the disassembly of the parsed bytecode to
        # stdout with the stdlib dis module.
        bytecode = field.value
        dis(bytecode)
+
+# --- String and string reference ---
def parseString(parent):
    """
    Parse a marshalled string object: 32-bit length followed by raw bytes.

    A code object's "lnotab" string is decoded specially: it is a flat
    sequence of (bytecode offset delta, line number delta) byte pairs, and
    each delta field gets the running absolute value in its description.
    """
    yield UInt32(parent, "length", "Length")
    length = parent["length"].value
    if parent.name == "lnotab":
        bytecode_offset = 0
        line_number = parent['../firstlineno'].value
        # xrange for consistency with the rest of this module (and to avoid
        # materializing a list for a long lnotab).
        for index in xrange(0, length, 2):
            bc_off_delta = UInt8(parent, 'bytecode_offset_delta[]')
            yield bc_off_delta
            bytecode_offset += bc_off_delta.value
            bc_off_delta._description = 'Bytecode Offset %i' % bytecode_offset
            line_number_delta = UInt8(parent, 'line_number_delta[]')
            yield line_number_delta
            line_number += line_number_delta.value
            line_number_delta._description = 'Line Number %i' % line_number
    elif 0 < length:
        yield RawBytes(parent, "text", length, "Content")
    if DISASSEMBLE and parent.name == "compiled_code":
        disassembleBytecode(parent["text"])
+
def parseStringRef(parent):
    # Reference (index) into the table of previously interned strings.
    yield textHandler(UInt32(parent, "ref"), hexadecimal)
def createStringRefDesc(parent):
    # Human-readable description for a string reference object.
    return "String ref: %s" % parent["ref"].display
+
+# --- Integers ---
def parseInt32(parent):
    # 32-bit signed integer payload.
    yield Int32(parent, "value")

def parseInt64(parent):
    # 64-bit signed integer payload.
    yield Int64(parent, "value")

def parseLong(parent):
    # Arbitrary-precision integer: signed digit count (its sign is the
    # number's sign) followed by digits stored in 16-bit words.
    yield Int32(parent, "digit_count")
    for index in xrange( abs(parent["digit_count"].value) ):
        yield UInt16(parent, "digit[]")
+
+
+# --- Float and complex ---
def parseFloat(parent):
    # Float stored as its text representation (length-prefixed string).
    yield PascalString8(parent, "value")
def parseBinaryFloat(parent):
    # Float stored as a binary 64-bit double.
    yield Float64(parent, "value")
def parseComplex(parent):
    # Complex number: real and imaginary parts as text strings.
    yield PascalString8(parent, "real")
    yield PascalString8(parent, "complex")
def parseBinaryComplex(parent):
    # Complex number: real and imaginary parts as binary doubles.
    yield Float64(parent, "real")
    yield Float64(parent, "complex")
+
+
+# --- Tuple and list ---
def parseTuple(parent):
    """Parse tuple/list/set/frozenset: item count then that many objects."""
    yield Int32(parent, "count", "Item count")
    count = parent["count"].value
    if count < 0:
        raise ParserError("Invalid tuple/list count")
    for index in xrange(count):
        yield Object(parent, "item[]")

def createTupleDesc(parent):
    # Description such as "Tuple: 3 items", reusing the container's
    # display name from the bytecode table (code_info[2]).
    count = parent["count"].value
    items = ngettext("%s item", "%s items", count) % count
    return "%s: %s" % (parent.code_info[2], items)
+
+
+# --- Dict ---
def parseDict(parent):
    """
    Format is: (key1, value1, key2, value2, ..., keyn, valuen, NULL)
    where each keyi and valuei is an object.
    """
    # The key count is stored on the parent for createDictDesc().
    parent.count = 0
    while True:
        key = Object(parent, "key[]")
        yield key
        # A NULL object ('0' bytecode) terminates the dictionary.
        if key["bytecode"].value == "0":
            break
        yield Object(parent, "value[]")
        parent.count += 1

def createDictDesc(parent):
    # e.g. "Dict: 3 keys" (count was stored on the parent by parseDict).
    return "Dict: %s" % (ngettext("%s key", "%s keys", parent.count) % parent.count)
+
+# --- Code ---
def parseCode(parent):
    """
    Parse a marshalled code object.

    Field widths and the presence of some fields depend on the Python
    version that produced the file (parent.root.getVersion()):
    - 3.0+: kwonlyargcount added, 32-bit counters
    - 2.3+: 32-bit counters and 32-bit firstlineno
    - 2.0+: freevars/cellvars present
    - older: 16-bit counters and firstlineno
    """
    if 0x3000000 <= parent.root.getVersion():
        yield UInt32(parent, "arg_count", "Argument count")
        yield UInt32(parent, "kwonlyargcount", "Keyword only argument count")
        yield UInt32(parent, "nb_locals", "Number of local variables")
        yield UInt32(parent, "stack_size", "Stack size")
        yield UInt32(parent, "flags")
    elif 0x2030000 <= parent.root.getVersion():
        yield UInt32(parent, "arg_count", "Argument count")
        yield UInt32(parent, "nb_locals", "Number of local variables")
        yield UInt32(parent, "stack_size", "Stack size")
        yield UInt32(parent, "flags")
    else:
        yield UInt16(parent, "arg_count", "Argument count")
        yield UInt16(parent, "nb_locals", "Number of local variables")
        yield UInt16(parent, "stack_size", "Stack size")
        yield UInt16(parent, "flags")
    yield Object(parent, "compiled_code")
    yield Object(parent, "consts")
    yield Object(parent, "names")
    yield Object(parent, "varnames")
    if 0x2000000 <= parent.root.getVersion():
        yield Object(parent, "freevars")
        yield Object(parent, "cellvars")
    yield Object(parent, "filename")
    yield Object(parent, "name")
    if 0x2030000 <= parent.root.getVersion():
        yield UInt32(parent, "firstlineno", "First line number")
    else:
        yield UInt16(parent, "firstlineno", "First line number")
    yield Object(parent, "lnotab")
+
class Object(FieldSet):
    """
    One marshalled Python object: a one-byte type code ("bytecode")
    followed by a type-specific payload.

    bytecode_info maps each type code to a tuple:
    (default field name, payload parser, display name, description builder).
    """
    bytecode_info = {
        # Types that carry no payload at all
        '0': ("null", None, "NULL", None),
        'N': ("none", None, "None", None),
        'F': ("false", None, "False", None),
        'T': ("true", None, "True", None),
        'S': ("stop_iter", None, "StopIter", None),
        '.': ("ellipsis", None, "ELLIPSIS", None),
        '?': ("unknown", None, "Unknown", None),

        'i': ("int32", parseInt32, "Int32", None),
        'I': ("int64", parseInt64, "Int64", None),
        'f': ("float", parseFloat, "Float", None),
        'g': ("bin_float", parseBinaryFloat, "Binary float", None),
        'x': ("complex", parseComplex, "Complex", None),
        'y': ("bin_complex", parseBinaryComplex, "Binary complex", None),
        'l': ("long", parseLong, "Long", None),
        's': ("string", parseString, "String", None),
        't': ("interned", parseString, "Interned", None),
        'u': ("unicode", parseString, "Unicode", None),
        'R': ("string_ref", parseStringRef, "String ref", createStringRefDesc),
        '(': ("tuple", parseTuple, "Tuple", createTupleDesc),
        '[': ("list", parseTuple, "List", createTupleDesc),
        '<': ("set", parseTuple, "Set", createTupleDesc),
        '>': ("frozenset", parseTuple, "Frozen set", createTupleDesc),
        '{': ("dict", parseDict, "Dict", createDictDesc),
        'c': ("code", parseCode, "Code", None),
    }

    def __init__(self, parent, name, **kw):
        FieldSet.__init__(self, parent, name, **kw)
        code = self["bytecode"].value
        if code not in self.bytecode_info:
            raise ParserError('Unknown bytecode: "%s"' % code)
        self.code_info = self.bytecode_info[code]
        if not name:
            self._name = self.code_info[0]
        # Bind type-specific createValue/createDisplay implementations on
        # this instance, depending on the type code.
        if code == "l":
            self.createValue = self.createValueLong
        elif code in ("i", "I", "f", "g"):
            self.createValue = lambda: self["value"].value
        elif code == "T":
            self.createValue = lambda: True
        elif code == "F":
            self.createValue = lambda: False
        elif code in ("x", "y"):
            self.createValue = self.createValueComplex
        elif code in ("s", "t", "u"):
            self.createValue = self.createValueString
            self.createDisplay = self.createDisplayString
            if code == 't':
                # Interned strings are recorded on the root so that later
                # 'R' (string ref) objects can resolve their index.
                if not hasattr(self.root,'string_table'):
                    self.root.string_table=[]
                self.root.string_table.append(self)
        elif code == 'R':
            if hasattr(self.root,'string_table'):
                self.createValue = self.createValueStringRef

    def createValueString(self):
        # Empty strings have no "text" field at all.
        if "text" in self:
            return self["text"].value
        else:
            return ""

    def createDisplayString(self):
        if "text" in self:
            return self["text"].display
        else:
            return "(empty)"

    def createValueLong(self):
        # Rebuild the arbitrary-precision integer from its 15-bit digits
        # (most significant digit last).
        is_negative = self["digit_count"].value < 0
        count = abs(self["digit_count"].value)
        total = 0
        for index in xrange(count-1, -1, -1):
            total <<= 15
            total += self["digit[%u]" % index].value
        if is_negative:
            total = -total
        return total

    def createValueStringRef(self):
        # Resolve the reference through the interned-string table.
        return self.root.string_table[self['ref'].value].value

    def createDisplayStringRef(self):
        return self.root.string_table[self['ref'].value].display

    def createValueComplex(self):
        return complex(
            float(self["real"].value),
            float(self["complex"].value))

    def createFields(self):
        yield Character(self, "bytecode", "Bytecode")
        parser = self.code_info[1]
        if parser:
            for field in parser(self):
                yield field

    def createDescription(self):
        create = self.code_info[3]
        if create:
            return create(self)
        else:
            return self.code_info[2]
+
class PythonCompiledFile(Parser):
    """Parser for compiled Python bytecode files (.pyc / .pyo)."""
    PARSER_TAGS = {
        "id": "python",
        "category": "program",
        "file_ext": ("pyc", "pyo"),
        "min_size": 9*8,
        "description": "Compiled Python script (.pyc/.pyo files)"
    }
    endian = LITTLE_ENDIAN

    # Dictionary which associates the pyc signature (32-bit integer)
    # to a Python version string (eg. "m\xf2\r\n" => "Python 2.4b1").
    # This list comes from CPython source code, see "MAGIC"
    # and "pyc_magic" in file Python/import.c
    MAGIC = {
        # Python 1.x
        20121: ("1.5", 0x1050000),

        # Python 2.x
        50823: ("2.0", 0x2000000),
        60202: ("2.1", 0x2010000),
        60717: ("2.2", 0x2020000),
        62011: ("2.3a0", 0x2030000),
        62021: ("2.3a0", 0x2030000),
        62041: ("2.4a0", 0x2040000),
        62051: ("2.4a3", 0x2040000),
        62061: ("2.4b1", 0x2040000),
        62071: ("2.5a0", 0x2050000),
        62081: ("2.5a0 (ast-branch)", 0x2050000),
        62091: ("2.5a0 (with)", 0x2050000),
        62092: ("2.5a0 (WITH_CLEANUP opcode)", 0x2050000),
        62101: ("2.5b3", 0x2050000),
        62111: ("2.5b3", 0x2050000),
        62121: ("2.5c1", 0x2050000),
        62131: ("2.5c2", 0x2050000),

        # Python 3.x
        3000:  ("3.0 (3000)",  0x3000000),
        3010:  ("3.0 (3010)",  0x3000000),
        3020:  ("3.0 (3020)",  0x3000000),
        3030:  ("3.0 (3030)",  0x3000000),
        3040:  ("3.0 (3040)",  0x3000000),
        3050:  ("3.0 (3050)",  0x3000000),
        3060:  ("3.0 (3060)",  0x3000000),
        3070:  ("3.0 (3070)",  0x3000000),
        3080:  ("3.0 (3080)",  0x3000000),
        3090:  ("3.0 (3090)",  0x3000000),
        3100:  ("3.0 (3100)",  0x3000000),
        3102:  ("3.0 (3102)",  0x3000000),
        3110:  ("3.0a4",       0x3000000),
        3130:  ("3.0a5",       0x3000000),
        3131:  ("3.0a5 unicode",       0x3000000),
    }

    # Dictionary which associates the pyc signature (4-byte long string)
    # to a Python version string (eg. "m\xf2\r\n" => "2.4b1")
    STR_MAGIC = dict( \
        (long2raw(magic | (ord('\r')<<16) | (ord('\n')<<24), LITTLE_ENDIAN), value[0]) \
        for magic, value in MAGIC.iteritems())

    def validate(self):
        """Check the magic word, the "\\r\\n" marker and the first object type."""
        signature = self.stream.readBits(0, 16, self.endian)
        if signature not in self.MAGIC:
            return "Unknown version (%s)" % signature
        if self.stream.readBytes(2*8, 2) != "\r\n":
            return r"Wrong signature (\r\n)"
        if self.stream.readBytes(8*8, 1) != 'c':
            return "First object bytecode is not code"
        return True

    def getVersion(self):
        """Return (and cache) the hex-encoded Python version of this file."""
        if not hasattr(self, "version"):
            signature = self.stream.readBits(0, 16, self.endian)
            self.version = self.MAGIC[signature][1]
        return self.version

    def createFields(self):
        yield Enum(Bytes(self, "signature", 4, "Python file signature and version"), self.STR_MAGIC)
        yield TimestampUnix32(self, "timestamp", "Timestamp")
        yield Object(self, "content")
+
diff --git a/lib/hachoir_parser/template.py b/lib/hachoir_parser/template.py
new file mode 100644
index 0000000000000000000000000000000000000000..836215c1217b625c697f637a24580e889d39b6cb
--- /dev/null
+++ b/lib/hachoir_parser/template.py
@@ -0,0 +1,54 @@
+"""
+====================== 8< ============================
+This file is an Hachoir parser template. Make a copy
+of it, and adapt it to your needs.
+
+You have to replace all "TODO" with you code.
+====================== 8< ============================
+
+TODO parser.
+
+Author: TODO TODO
+Creation date: YYYY-mm-DD
+"""
+
+# TODO: Just keep what you need
+from hachoir_parser import Parser
+from hachoir_core.field import (ParserError,
+    UInt8, UInt16, UInt32, String, RawBytes)
+from hachoir_core.endian import LITTLE_ENDIAN, BIG_ENDIAN
+
class TODOFile(Parser):
    """Template parser skeleton: replace every TODO before use."""
    PARSER_TAGS = {
        "id": "TODO",
        "category": "TODO",    # "archive", "audio", "container", ...
        "file_ext": ("TODO",), # TODO: Example ("bmp",) to parse the file "image.bmp"
        # The trailing comma makes this a one-element tuple, like file_ext;
        # a bare (u"TODO") would just be a string.
        "mime": (u"TODO",),     # TODO: Example: "image/png"
        "min_size": 0,         # TODO: Minimum file size (x bits, or x*8 in bytes)
        "description": "TODO", # TODO: Example: "A bitmap picture"
    }

#    TODO: Choose between little or big endian
#    endian = LITTLE_ENDIAN
#    endian = BIG_ENDIAN

    def validate(self):
        # TODO: Check that file looks like your format
        # Example: check first two bytes
        # return (self.stream.readBytes(0, 2) == 'BM')
        return False

    def createFields(self):
        # TODO: Write your parser using this model:
        # yield UInt8(self, "name1", "description1")
        # yield UInt16(self, "name2", "description2")
        # yield UInt32(self, "name3", "description3")
        # yield String(self, "name4", 1, "description4") # TODO: add ", charset="ASCII")"
        # yield String(self, "name5", 1, "description5", charset="ASCII")
        # yield String(self, "name6", 1, "description6", charset="ISO-8859-1")

        # Read rest of the file (if any)
        # TODO: You may remove this code
        if self.current_size < self._size:
            yield self.seekBit(self._size, "end")
+
diff --git a/lib/hachoir_parser/version.py b/lib/hachoir_parser/version.py
new file mode 100644
index 0000000000000000000000000000000000000000..28d1e616bd4338d40500d8611f035ace7833bed0
--- /dev/null
+++ b/lib/hachoir_parser/version.py
@@ -0,0 +1,5 @@
+__version__ = "1.3.4"
+PACKAGE = "hachoir-parser"
+WEBSITE = "http://bitbucket.org/haypo/hachoir/wiki/hachoir-parser"
+LICENSE = 'GNU GPL v2'
+
diff --git a/lib/hachoir_parser/video/__init__.py b/lib/hachoir_parser/video/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..26f787e9589f668688265e7a08f5d8791f20191e
--- /dev/null
+++ b/lib/hachoir_parser/video/__init__.py
@@ -0,0 +1,6 @@
+from hachoir_parser.video.asf import AsfFile
+from hachoir_parser.video.flv import FlvFile
+from hachoir_parser.video.mov import MovFile
+from hachoir_parser.video.mpeg_video import MPEGVideoFile
+from hachoir_parser.video.mpeg_ts import MPEG_TS
+
diff --git a/lib/hachoir_parser/video/amf.py b/lib/hachoir_parser/video/amf.py
new file mode 100644
index 0000000000000000000000000000000000000000..496c5c1d6c1c7bdb9e6c94edf5a424b364e870af
--- /dev/null
+++ b/lib/hachoir_parser/video/amf.py
@@ -0,0 +1,110 @@
+"""
+AMF metadata (inside Flash video, FLV file) parser.
+
+Documentation:
+
+ - flashticle: Python project to read Flash (formats SWF, FLV and AMF)
+   http://undefined.org/python/#flashticle
+
+Author: Victor Stinner
+Creation date: 4 november 2006
+"""
+
+from hachoir_core.field import (FieldSet, ParserError,
+    UInt8, UInt16, UInt32, PascalString16, Float64)
+from hachoir_core.tools import timestampUNIX
+
def parseUTF8(parent):
    # UTF-8 string: 16-bit length prefix followed by the encoded bytes.
    yield PascalString16(parent, "value", charset="UTF-8")

def parseDouble(parent):
    # AMF numbers are 64-bit IEEE-754 doubles.
    yield Float64(parent, "value")

def parseBool(parent):
    # Boolean stored as a single byte.
    yield UInt8(parent, "value")

def parseArray(parent):
    # Strict array: 32-bit count then that many values.
    yield UInt32(parent, "count")
    for index in xrange(parent["count"].value):
        yield AMFObject(parent, "item[]")

def parseObjectAttributes(parent):
    # Object body: (key, value) attributes; an empty key marks the end.
    while True:
        item = Attribute(parent, "attr[]")
        yield item
        if item["key"].value == "":
            break

def parseMixedArray(parent):
    # Mixed (ECMA) array: declared count, then key/value pairs up to and
    # including the empty-key terminator (hence "count + 1").
    yield UInt32(parent, "count")
    for index in xrange(parent["count"].value + 1):
        item = Attribute(parent, "item[]")
        yield item
        if not item['key'].value:
            break

def parseDate(parent):
    # NOTE(review): field names suggest microseconds/seconds; the value
    # computation in AMFObject.createValueDate treats them as milliseconds
    # and a minute offset -- confirm against the AMF specification.
    yield Float64(parent, "timestamp_microsec")
    yield UInt16(parent, "timestamp_sec")

def parseNothing(parent):
    # End-of-object marker has no payload.  Raising StopIteration ends the
    # caller's createFields() generator immediately (Python 2 idiom).
    raise StopIteration()
+
class AMFObject(FieldSet):
    """
    One AMF value: a one-byte type code followed by a type-specific
    payload.  tag_info maps the type code to (payload parser, description).
    """
    CODE_DATE = 11
    tag_info = {
        # http://osflash.org/amf/astypes
         0: (parseDouble, "Double"),
         1: (parseBool, "Boolean"),
         2: (parseUTF8, "UTF-8 string"),
         3: (parseObjectAttributes, "Object attributes"),
        #MOVIECLIP = '\x04',
        #NULL = '\x05',
        #UNDEFINED = '\x06',
        #REFERENCE = '\x07',
         8: (parseMixedArray, "Mixed array"),
         9: (parseNothing, "End of object"),
        10: (parseArray, "Array"),
        CODE_DATE: (parseDate, "Date"),
        #LONGUTF8 = '\x0c',
        #UNSUPPORTED = '\x0d',
        ## Server-to-client only
        #RECORDSET = '\x0e',
        #XML = '\x0f',
        #TYPEDOBJECT = '\x10',
    }

    def __init__(self, *args, **kw):
        FieldSet.__init__(self, *args, **kw)
        # Select the payload parser from the (already parsed) type byte.
        code = self["type"].value
        try:
            self.parser, desc = self.tag_info[code]
            if code == self.CODE_DATE:
                self.createValue = self.createValueDate
        except KeyError:
            raise ParserError("AMF: Unable to parse type %s" % code)

    def createFields(self):
        yield UInt8(self, "type")
        for field in self.parser(self):
            yield field

    def createValueDate(self):
        # NOTE(review): converts the two date fields to a UNIX timestamp;
        # the exact unit interpretation should be checked against the spec.
        value = (self["timestamp_microsec"].value * 0.001) \
            - (self["timestamp_sec"].value * 60)
        return timestampUNIX(value)
+
class Attribute(AMFObject):
    """
    A named AMF value (key string followed by a typed value).  Note that
    AMFObject.__init__ reads self["type"], which is the *second* field
    here -- the key is parsed first.
    """
    def __init__(self, *args):
        AMFObject.__init__(self, *args)
        # Drop the description inherited from AMFObject; use our own.
        self._description = None

    def createFields(self):
        yield PascalString16(self, "key", charset="UTF-8")
        yield UInt8(self, "type")
        for field in self.parser(self):
            yield field

    def createDescription(self):
        return 'Attribute "%s"' % self["key"].value
+
diff --git a/lib/hachoir_parser/video/asf.py b/lib/hachoir_parser/video/asf.py
new file mode 100644
index 0000000000000000000000000000000000000000..39205ea67f856c7559286eb838a1c976b6bc3f87
--- /dev/null
+++ b/lib/hachoir_parser/video/asf.py
@@ -0,0 +1,356 @@
+"""
+Advanced Streaming Format (ASF) parser, format used by Windows Media Video
(WMV) and Windows Media Audio (WMA).
+
Information:
+- http://www.microsoft.com/windows/windowsmedia/forpros/format/asfspec.aspx
+- http://swpat.ffii.org/pikta/xrani/asf/index.fr.html
+
+Author: Victor Stinner
+Creation: 5 august 2006
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, ParserError,
+    UInt16, UInt32, UInt64,
+    TimestampWin64, TimedeltaWin64,
+    String, PascalString16, Enum,
+    Bit, Bits, PaddingBits,
+    PaddingBytes, NullBytes, RawBytes)
+from hachoir_core.endian import LITTLE_ENDIAN
+from hachoir_core.text_handler import (
+    displayHandler, filesizeHandler)
+from hachoir_core.tools import humanBitRate
+from itertools import izip
+from hachoir_parser.video.fourcc import audio_codec_name, video_fourcc_name
+from hachoir_parser.common.win32 import BitmapInfoHeader, GUID
+
+MAX_HEADER_SIZE = 100 * 1024  # bytes
+
class AudioHeader(FieldSet):
    """ASF audio stream type-specific data: codec, channels, rates."""
    guid = "F8699E40-5B4D-11CF-A8FD-00805F5C442B"
    def createFields(self):
        yield Enum(UInt16(self, "twocc"), audio_codec_name)
        yield UInt16(self, "channels")
        yield UInt32(self, "sample_rate")
        yield UInt32(self, "bit_rate")
        yield UInt16(self, "block_align")
        yield UInt16(self, "bits_per_sample")
        yield UInt16(self, "codec_specific_size")
        # Trailing codec-private blob, sized by the previous field
        size = self["codec_specific_size"].value
        if size:
            yield RawBytes(self, "codec_specific", size)
+
class BitrateMutualExclusion(FieldSet):
    """Bitrate mutual exclusion object: exclusion type GUID plus stream numbers."""
    guid = "D6E229DC-35DA-11D1-9034-00A0C90349BE"
    # Known exclusion-type GUIDs -> display name
    mutex_name = {
        "D6E22A00-35DA-11D1-9034-00A0C90349BE": "Language",
        "D6E22A01-35DA-11D1-9034-00A0C90349BE": "Bitrate",
        "D6E22A02-35DA-11D1-9034-00A0C90349BE": "Unknown",
    }

    def createFields(self):
        yield Enum(GUID(self, "exclusion_type"), self.mutex_name)
        yield UInt16(self, "nb_stream")
        for index in xrange(self["nb_stream"].value):
            yield UInt16(self, "stream[]")
+
class VideoHeader(FieldSet):
    """ASF video stream type-specific data: dimensions plus a BITMAPINFOHEADER.

    Raises ParserError when format_data_size is smaller than a minimal
    BITMAPINFOHEADER (40 bytes).
    """
    guid = "BC19EFC0-5B4D-11CF-A8FD-00805F5C442B"
    def createFields(self):
        # The original code carried a dead "if False:" branch describing an
        # alternative layout; only the live branch is kept.
        yield UInt32(self, "width")
        yield UInt32(self, "height")
        yield PaddingBytes(self, "reserved[]", 1)
        yield UInt16(self, "format_data_size")
        if self["format_data_size"].value < 40:
            # A BITMAPINFOHEADER is at least 40 bytes
            raise ParserError("Unknown format data size")
        yield BitmapInfoHeader(self, "bmp_info", use_fourcc=True)
+
class FileProperty(FieldSet):
    """Global file properties: size, dates, durations, packet and bitrate bounds."""
    guid = "8CABDCA1-A947-11CF-8EE4-00C00C205365"
    def createFields(self):
        yield GUID(self, "guid")
        yield filesizeHandler(UInt64(self, "file_size"))
        yield TimestampWin64(self, "creation_date")
        yield UInt64(self, "pckt_count")
        yield TimedeltaWin64(self, "play_duration")
        yield TimedeltaWin64(self, "send_duration")
        yield UInt64(self, "preroll")
        yield Bit(self, "broadcast", "Is broadcast?")
        yield Bit(self, "seekable", "Seekable stream?")
        yield PaddingBits(self, "reserved[]", 30)
        yield filesizeHandler(UInt32(self, "min_pckt_size"))
        yield filesizeHandler(UInt32(self, "max_pckt_size"))
        yield displayHandler(UInt32(self, "max_bitrate"), humanBitRate)
+
class HeaderExtension(FieldSet):
    """Header extension object: reserved fields plus an opaque data blob."""
    guid = "5FBF03B5-A92E-11CF-8EE3-00C00C205365"
    def createFields(self):
        yield GUID(self, "reserved[]")
        yield UInt16(self, "reserved[]")
        yield UInt32(self, "size")
        if self["size"].value:
            # Extension payload is left unparsed
            yield RawBytes(self, "data", self["size"].value)
+
class Header(FieldSet):
    """Top-level ASF header object: a counted sequence of sub-objects."""
    guid = "75B22630-668E-11CF-A6D9-00AA0062CE6C"
    def createFields(self):
        yield UInt32(self, "obj_count")
        yield PaddingBytes(self, "reserved[]", 2)
        for index in xrange(self["obj_count"].value):
            yield Object(self, "object[]")
+
class Metadata(FieldSet):
    """Content description: five 16-bit sizes followed by that many UTF-16 strings."""
    guid = "75B22633-668E-11CF-A6D9-00AA0062CE6C"
    # NOTE(review): the last two entries are named "xxx"/"yyy"; per the ASF
    # content description object they are presumably "description" and
    # "rating" -- confirm before renaming (field names are part of the
    # parser's public output).
    names = ("title", "author", "copyright", "xxx", "yyy")
    def createFields(self):
        for index in xrange(5):
            yield UInt16(self, "size[]")
        # Sizes are in bytes; zero-size strings are skipped entirely
        for name, size in izip(self.names, self.array("size")):
            if size.value:
                yield String(self, name, size.value, charset="UTF-16-LE", strip=" \0")
+
class Descriptor(FieldSet):
    """
    One extended content description entry: a named, typed value.

    See ExtendedContentDescription class.
    """
    TYPE_BYTE_ARRAY = 1
    # Value type code -> display name
    TYPE_NAME = {
        0: "Unicode",
        1: "Byte array",
        2: "BOOL (32 bits)",
        3: "DWORD (32 bits)",
        4: "QWORD (64 bits)",
        5: "WORD (16 bits)"
    }
    def createFields(self):
        yield PascalString16(self, "name", "Name", charset="UTF-16-LE", strip="\0")
        yield Enum(UInt16(self, "type"), self.TYPE_NAME)
        yield UInt16(self, "value_length")
        # Renamed from "type"/"size" locals: don't shadow the builtin type()
        value_type = self["type"].value
        size = self["value_length"].value
        name = "value"
        if value_type == 0 and (size % 2) == 0:
            # Unicode string; odd byte lengths fall through to raw bytes below
            yield String(self, name, size, charset="UTF-16-LE", strip="\0")
        elif value_type in (2, 3):
            yield UInt32(self, name)
        elif value_type == 4:
            yield UInt64(self, name)
        else:
            # Byte array, WORD (not decoded as UInt16 here) and malformed values
            yield RawBytes(self, name, size)
+
class ExtendedContentDescription(FieldSet):
    """Counted list of Descriptor entries (name/type/value triples)."""
    guid = "D2D0A440-E307-11D2-97F0-00A0C95EA850"
    def createFields(self):
        yield UInt16(self, "count")
        for index in xrange(self["count"].value):
            yield Descriptor(self, "descriptor[]")
+
class Codec(FieldSet):
    """
    One codec entry: type, name, description and opaque codec information.

    See CodecList class.
    """
    type_name = {
        1: "video",
        2: "audio"
    }
    def createFields(self):
        yield Enum(UInt16(self, "type"), self.type_name)
        yield UInt16(self, "name_len", "Name length in character (byte=len*2)")
        if self["name_len"].value:
            # name/desc lengths count UTF-16 code units, hence the *2 byte size
            yield String(self, "name", self["name_len"].value*2, "Name", charset="UTF-16-LE", strip=" \0")
        yield UInt16(self, "desc_len", "Description length in character (byte=len*2)")
        if self["desc_len"].value:
            yield String(self, "desc", self["desc_len"].value*2, "Description", charset="UTF-16-LE", strip=" \0")
        yield UInt16(self, "info_len")
        if self["info_len"].value:
            # info_len is in bytes (no *2), unlike name/desc
            yield RawBytes(self, "info", self["info_len"].value)
+
class CodecList(FieldSet):
    """Codec list object: reserved GUID then a counted list of Codec entries."""
    guid = "86D15240-311D-11D0-A3A4-00A0C90348F6"

    def createFields(self):
        yield GUID(self, "reserved[]")
        yield UInt32(self, "count")
        for index in xrange(self["count"].value):
            yield Codec(self, "codec[]")
+
class SimpleIndexEntry(FieldSet):
    """
    One index entry: packet number and packet count.

    See SimpleIndex class.
    """
    def createFields(self):
        yield UInt32(self, "pckt_number")
        yield UInt16(self, "pckt_count")
+
class SimpleIndex(FieldSet):
    """Simple index object: fixed-interval seek table of SimpleIndexEntry items."""
    guid = "33000890-E5B1-11CF-89F4-00A0C90349CB"

    def createFields(self):
        yield GUID(self, "file_id")
        yield TimedeltaWin64(self, "entry_interval")
        yield UInt32(self, "max_pckt_count")
        yield UInt32(self, "entry_count")
        for index in xrange(self["entry_count"].value):
            yield SimpleIndexEntry(self, "entry[]")
+
class BitRate(FieldSet):
    """
    Average bitrate of one stream.

    See BitRateList class.
    """
    def createFields(self):
        yield Bits(self, "stream_index", 7)
        yield PaddingBits(self, "reserved", 9)
        yield displayHandler(UInt32(self, "avg_bitrate"), humanBitRate)
+
class BitRateList(FieldSet):
    """Stream bitrate properties object: counted list of BitRate entries."""
    guid = "7BF875CE-468D-11D1-8D82-006097C9A2B2"

    def createFields(self):
        yield UInt16(self, "count")
        for index in xrange(self["count"].value):
            yield BitRate(self, "bit_rate[]")
+
class Data(FieldSet):
    """Data object: media packets, kept here as one raw blob."""
    guid = "75B22636-668E-11CF-A6D9-00AA0062CE6C"

    def createFields(self):
        yield GUID(self, "file_id")
        yield UInt64(self, "packet_count")
        yield PaddingBytes(self, "reserved", 2)
        # Remaining size of this object, converted from bits to bytes
        # (Python 2 integer division)
        size = (self.size - self.current_size) / 8
        yield RawBytes(self, "data", size)
+
class StreamProperty(FieldSet):
    """Stream properties object: stream type GUID, flags and type-specific data."""
    guid = "B7DC0791-A9B7-11CF-8EE6-00C00C205365"
    def createFields(self):
        yield GUID(self, "type")
        yield GUID(self, "error_correction")
        yield UInt64(self, "time_offset")
        yield UInt32(self, "data_len")
        yield UInt32(self, "error_correct_len")
        yield Bits(self, "stream_index", 7)
        yield Bits(self, "reserved[]", 8)
        yield Bit(self, "encrypted", "Content is encrypted?")
        yield UInt32(self, "reserved[]")
        size = self["data_len"].value
        if size:
            # If the stream type GUID is known (audio/video header...),
            # parse the type-specific data with the matching handler taken
            # from Object.TAG_INFO; otherwise keep it raw
            tag = self["type"].value
            if tag in Object.TAG_INFO:
                name, parser = Object.TAG_INFO[tag][0:2]
                yield parser(self, name, size=size*8)
            else:
                yield RawBytes(self, "data", size)
        size = self["error_correct_len"].value
        if size:
            yield RawBytes(self, "error_correct", size)
+
class Object(FieldSet):
    """Generic ASF object: GUID + size header, content parsed according to the GUID."""
    # This list is converted to a dictionary later where the key is the GUID
    TAG_INFO = (
        ("header", Header, "Header object"),
        ("file_prop", FileProperty, "File property"),
        ("header_ext", HeaderExtension, "Header extension"),
        ("codec_list", CodecList, "Codec list"),
        ("simple_index", SimpleIndex, "Simple index"),
        ("data", Data, "Data object"),
        ("stream_prop[]", StreamProperty, "Stream properties"),
        ("bit_rates", BitRateList, "Bit rate list"),
        ("ext_desc", ExtendedContentDescription, "Extended content description"),
        ("metadata", Metadata, "Metadata"),
        ("video_header", VideoHeader, "Video"),
        ("audio_header", AudioHeader, "Audio"),
        ("bitrate_mutex", BitrateMutualExclusion, "Bitrate mutual exclusion"),
    )

    def __init__(self, *args, **kw):
        FieldSet.__init__(self, *args, **kw)

        # Pick field name and content handler from the GUID; unknown GUIDs
        # keep the generic name and dump their content as raw bytes
        tag = self["guid"].value
        if tag not in self.TAG_INFO:
            self.handler = None
            return
        info = self.TAG_INFO[tag]
        self._name = info[0]
        self.handler = info[1]

    def createFields(self):
        yield GUID(self, "guid")
        yield filesizeHandler(UInt64(self, "size"))

        # "size" covers the whole object, header included
        size = self["size"].value - self.current_size/8
        if 0 < size:
            if self.handler:
                yield self.handler(self, "content", size=size*8)
            else:
                yield RawBytes(self, "content", size)
+
# Rebuild TAG_INFO as a mapping: handler GUID -> (name, handler, description)
tag_info_list = Object.TAG_INFO
Object.TAG_INFO = dict( (parser[1].guid, parser) for parser in tag_info_list )
+
class AsfFile(Parser):
    """ASF container parser; handles .asf, .wmv and .wma files."""
    # 16-byte GUID of the ASF header object
    MAGIC = "\x30\x26\xB2\x75\x8E\x66\xCF\x11\xA6\xD9\x00\xAA\x00\x62\xCE\x6C"
    PARSER_TAGS = {
        "id": "asf",
        "category": "video",
        "file_ext": ("wmv", "wma", "asf"),
        "mime": (u"video/x-ms-asf", u"video/x-ms-wmv", u"audio/x-ms-wma"),
        "min_size": 24*8,
        "description": "Advanced Streaming Format (ASF), used for WMV (video) and WMA (audio)",
        "magic": ((MAGIC, 0),),
    }
    # MIME type -> (filename suffix, human readable description)
    FILE_TYPE = {
        "video/x-ms-wmv": (".wmv", u"Window Media Video (wmv)"),
        "video/x-ms-asf": (".asf", u"ASF container"),
        "audio/x-ms-wma": (".wma", u"Window Media Audio (wma)"),
    }
    endian = LITTLE_ENDIAN

    def validate(self):
        # Check the header GUID then a plausible header object size
        magic = self.MAGIC
        if self.stream.readBytes(0, len(magic)) != magic:
            return "Invalid magic"
        header = self[0]
        if not(30 <= header["size"].value  <= MAX_HEADER_SIZE):
            return "Invalid header size (%u)" % header["size"].value
        return True

    def createMimeType(self):
        # WMV if any video stream exists, else WMA if audio, else generic ASF
        audio = False
        for prop in self.array("header/content/stream_prop"):
            guid = prop["content/type"].value
            if guid == VideoHeader.guid:
                return u"video/x-ms-wmv"
            if guid == AudioHeader.guid:
                audio = True
        if audio:
            return u"audio/x-ms-wma"
        else:
            return u"video/x-ms-asf"

    def createFields(self):
        while not self.eof:
            yield Object(self, "object[]")

    def createDescription(self):
        return self.FILE_TYPE[self.mime_type][1]

    def createFilenameSuffix(self):
        return self.FILE_TYPE[self.mime_type][0]

    def createContentSize(self):
        # file_size is in bytes, content size is expressed in bits
        if self[0].name != "header":
            return None
        return self["header/content/file_prop/content/file_size"].value * 8
+
diff --git a/lib/hachoir_parser/video/flv.py b/lib/hachoir_parser/video/flv.py
new file mode 100644
index 0000000000000000000000000000000000000000..5edbe7ab2a3500f54ddfbd75b7e1f56f93bd9270
--- /dev/null
+++ b/lib/hachoir_parser/video/flv.py
@@ -0,0 +1,157 @@
+"""
+FLV video parser.
+
+Documentation:
+
+ - FLV File format: http://osflash.org/flv
+ - libavformat from ffmpeg project
+ - flashticle: Python project to read Flash (SWF and FLV with AMF metadata)
+   http://undefined.org/python/#flashticle
+
+Author: Victor Stinner
+Creation date: 4 november 2006
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet,
+    UInt8, UInt24, UInt32, NullBits, NullBytes,
+    Bit, Bits, String, RawBytes, Enum)
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_parser.audio.mpeg_audio import Frame
+from hachoir_parser.video.amf import AMFObject
+from hachoir_core.tools import createDict
+
# Audio sampling rate code (2 bits, see parseAudio) -> (rate in Hz, display text)
SAMPLING_RATE = {
    0: ( 5512, "5.5 kHz"),
    1: (11025, "11 kHz"),
    # NOTE(review): 22050 Hz is 22.05 kHz but the label says "22.1 kHz"
    2: (22050, "22.1 kHz"),
    3: (44100, "44.1 kHz"),
}
SAMPLING_RATE_VALUE = createDict(SAMPLING_RATE, 0)  # code -> rate in Hz
SAMPLING_RATE_TEXT = createDict(SAMPLING_RATE, 1)   # code -> display text

AUDIO_CODEC_MP3 = 2
# Audio codec code (4 bits, see parseAudio) -> name
AUDIO_CODEC_NAME = {
    0: u"Uncompressed",
    1: u"ADPCM",
    2: u"MP3",
    5: u"Nellymoser 8kHz mono",
    6: u"Nellymoser",
}

# Video codec code (4 bits, see parseVideo) -> name
VIDEO_CODEC_NAME = {
    2: u"Sorensen H.263",
    3: u"Screen video",
    4: u"On2 VP6",
}

# Video frame type code (4 bits, see parseVideo) -> name
FRAME_TYPE = {
    1: u"keyframe",
    2: u"inter frame",
    3: u"disposable inter frame",
}
+
class Header(FieldSet):
    """FLV file header: signature, version, stream-type flags and data offset."""
    def createFields(self):
        yield String(self, "signature", 3, "FLV format signature", charset="ASCII")
        yield UInt8(self, "version")

        # Flags byte: bit 2 = audio present, bit 0 = video present
        yield NullBits(self, "reserved[]", 5)
        yield Bit(self, "type_flags_audio")
        yield NullBits(self, "reserved[]", 1)
        yield Bit(self, "type_flags_video")

        yield UInt32(self, "data_offset")
+
def parseAudio(parent, size):
    """Parse an FLV audio tag body: one flags byte, then the sound data.

    MP3 payloads are parsed as an MPEG audio frame, anything else is kept raw.
    """
    yield Enum(Bits(parent, "codec", 4, "Audio codec"), AUDIO_CODEC_NAME)
    yield Enum(Bits(parent, "sampling_rate", 2, "Sampling rate"), SAMPLING_RATE_TEXT)
    yield Bit(parent, "is_16bit", "16-bit or 8-bit per sample")
    yield Bit(parent, "is_stereo", "Stereo or mono channel")

    remaining = size - 1
    if remaining > 0:
        if parent["codec"].value == AUDIO_CODEC_MP3:
            yield Frame(parent, "music_data", size=remaining * 8)
        else:
            yield RawBytes(parent, "music_data", remaining)
+
def parseVideo(parent, size):
    """Parse an FLV video tag body: frame type and codec nibbles, then raw picture data."""
    yield Enum(Bits(parent, "frame_type", 4, "Frame type"), FRAME_TYPE)
    yield Enum(Bits(parent, "codec", 4, "Video codec"), VIDEO_CODEC_NAME)
    remaining = size - 1
    if remaining > 0:
        yield RawBytes(parent, "data", remaining)
+
def parseAMF(parent, size):
    # "size" is unused: AMF entries are read until the chunk is exhausted.
    # The parameter is kept so all tag parsers share the same signature.
    while parent.current_size < parent.size:
        yield AMFObject(parent, "entry[]")
+
class Chunk(FieldSet):
    """One FLV tag: 11-byte header (tag/size/timestamp) plus its body."""
    # tag code -> (field name, body parser, description)
    tag_info = {
         8: ("audio[]", parseAudio, ""),
         9: ("video[]", parseVideo, ""),
        18: ("metadata", parseAMF, ""),
    }

    def __init__(self, *args, **kw):
        FieldSet.__init__(self, *args, **kw)
        # Total size: 11-byte tag header plus the body size read from it
        self._size = (11 + self["size"].value) * 8
        tag = self["tag"].value
        if tag in self.tag_info:
            self._name, self.parser, self._description = self.tag_info[tag]
        else:
            self.parser = None

    def createFields(self):
        yield UInt8(self, "tag")
        yield UInt24(self, "size", "Content size")
        yield UInt24(self, "timestamp", "Timestamp in millisecond")
        yield NullBytes(self, "reserved", 4)
        size = self["size"].value
        if size:
            if self.parser:
                for field in self.parser(self, size):
                    yield field
            else:
                # Unknown tag type: keep the body as raw bytes
                yield RawBytes(self, "content", size)

    def getSampleRate(self):
        # Only audio chunks have a "sampling_rate" field (see parseAudio);
        # return None for anything else
        try:
            return SAMPLING_RATE_VALUE[self["sampling_rate"].value]
        except LookupError:
            return None
+
class FlvFile(Parser):
    """Macromedia/Adobe Flash video (FLV) file parser."""
    PARSER_TAGS = {
        "id": "flv",
        "category": "video",
        "file_ext": ("flv",),
        "mime": (u"video/x-flv",),
        # NOTE(review): 9*4 = 36 bits, but the 9-byte header is 72 bits --
        # confirm the intended minimum size
        "min_size": 9*4,
        "magic": (
            # Signature, version=1, flags=5 (video+audio), header size=9
            ("FLV\1\x05\0\0\0\x09", 0),
            # Signature, version=1, flags=1 (video), header size=9
            ("FLV\1\x01\0\0\0\x09", 0),
        ),
        "description": u"Macromedia Flash video"
    }
    endian = BIG_ENDIAN

    def validate(self):
        # Cheap checks: signature bytes, then the fixed 9-byte data offset
        if self.stream.readBytes(0, 3) != "FLV":
            return "Wrong file signature"
        if self["header/data_offset"].value != 9:
            return "Unknown data offset in main header"
        return True

    def createFields(self):
        # FLV layout: header, then (previous tag size, tag) pairs
        yield Header(self, "header")
        yield UInt32(self, "prev_size[]", "Size of previous chunk")
        while not self.eof:
            yield Chunk(self, "chunk[]")
            yield UInt32(self, "prev_size[]", "Size of previous chunk")

    def createDescription(self):
        return u"Macromedia Flash video version %s" % self["header/version"].value
+
diff --git a/lib/hachoir_parser/video/fourcc.py b/lib/hachoir_parser/video/fourcc.py
new file mode 100644
index 0000000000000000000000000000000000000000..9d8ee692cfe18592897e268b98c6fabb3f4d6cd5
--- /dev/null
+++ b/lib/hachoir_parser/video/fourcc.py
@@ -0,0 +1,415 @@
#
# A fourcc is a code that specifies the encoding method of an audio or
# video stream in a RIFF file (.avi and .wav).
#
# The following lists come from the mmpython project:
#    file: mmpython/video/fourcc.py
#    url:  http://sourceforge.net/projects/mmpython/
#
+
# List of codecs with no compression (compression rate=1.0):
# PCM (0x0001), IEEE float (0x0003), A-law (0x0006) -- see audio_codec_name
UNCOMPRESSED_AUDIO = set((1,3,6))
+
# Audio codec identifier (twocc) -> human readable name.
# Note: the original table listed 0x0300 twice ("FM Towns Snd" then
# "Fujitsu FM Towns Snd"); only the effective (last) entry is kept.
audio_codec_name = {
0x0000: u'Microsoft Unknown Wave Format',
0x0001: u'Microsoft Pulse Code Modulation (PCM)',
0x0002: u'Microsoft ADPCM',
0x0003: u'IEEE Float',
0x0004: u'Compaq Computer VSELP',
0x0005: u'IBM CVSD',
0x0006: u'Microsoft A-Law',
0x0007: u'Microsoft mu-Law',
0x0010: u'OKI ADPCM',
0x0011: u'Intel DVI/IMA ADPCM',
0x0012: u'Videologic MediaSpace ADPCM',
0x0013: u'Sierra Semiconductor ADPCM',
0x0014: u'Antex Electronics G.723 ADPCM',
0x0015: u'DSP Solutions DigiSTD',
0x0016: u'DSP Solutions DigiFIX',
0x0017: u'Dialogic OKI ADPCM',
0x0018: u'MediaVision ADPCM',
0x0019: u'Hewlett-Packard CU',
0x0020: u'Yamaha ADPCM',
0x0021: u'Speech Compression Sonarc',
0x0022: u'DSP Group TrueSpeech',
0x0023: u'Echo Speech EchoSC1',
0x0024: u'Audiofile AF36',
0x0025: u'Audio Processing Technology APTX',
0x0026: u'AudioFile AF10',
0x0027: u'Prosody 1612',
0x0028: u'LRC',
0x0030: u'Dolby AC2',
0x0031: u'Microsoft GSM 6.10',
0x0032: u'MSNAudio',
0x0033: u'Antex Electronics ADPCME',
0x0034: u'Control Resources VQLPC',
0x0035: u'DSP Solutions DigiREAL',
0x0036: u'DSP Solutions DigiADPCM',
0x0037: u'Control Resources CR10',
0x0038: u'Natural MicroSystems VBXADPCM',
0x0039: u'Crystal Semiconductor IMA ADPCM',
0x003A: u'EchoSC3',
0x003B: u'Rockwell ADPCM',
0x003C: u'Rockwell Digit LK',
0x003D: u'Xebec',
0x0040: u'Antex Electronics G.721 ADPCM',
0x0041: u'G.728 CELP',
0x0042: u'MSG723',
0x0050: u'Microsoft MPEG',
0x0052: u'RT24',
0x0053: u'PAC',
0x0055: u'MPEG Layer 3',
0x0059: u'Lucent G.723',
0x0060: u'Cirrus',
0x0061: u'ESPCM',
0x0062: u'Voxware',
0x0063: u'Canopus Atrac',
0x0064: u'G.726 ADPCM',
0x0065: u'G.722 ADPCM',
0x0066: u'DSAT',
0x0067: u'DSAT Display',
0x0069: u'Voxware Byte Aligned',
0x0070: u'Voxware AC8',
0x0071: u'Voxware AC10',
0x0072: u'Voxware AC16',
0x0073: u'Voxware AC20',
0x0074: u'Voxware MetaVoice',
0x0075: u'Voxware MetaSound',
0x0076: u'Voxware RT29HW',
0x0077: u'Voxware VR12',
0x0078: u'Voxware VR18',
0x0079: u'Voxware TQ40',
0x0080: u'Softsound',
0x0081: u'Voxware TQ60',
0x0082: u'MSRT24',
0x0083: u'G.729A',
0x0084: u'MVI MV12',
0x0085: u'DF G.726',
0x0086: u'DF GSM610',
0x0088: u'ISIAudio',
0x0089: u'Onlive',
0x0091: u'SBC24',
0x0092: u'Dolby AC3 SPDIF',
0x0097: u'ZyXEL ADPCM',
0x0098: u'Philips LPCBB',
0x0099: u'Packed',
0x0100: u'Rhetorex ADPCM',
0x0101: u'IBM mu-law',
0x0102: u'IBM A-law',
0x0103: u'IBM AVC Adaptive Differential Pulse Code Modulation (ADPCM)',
0x0111: u'Vivo G.723',
0x0112: u'Vivo Siren',
0x0123: u'Digital G.723',
0x0140: u'Windows Media Video V8',
0x0161: u'Windows Media Audio V7 / V8 / V9',
0x0162: u'Windows Media Audio Professional V9',
0x0163: u'Windows Media Audio Lossless V9',
0x0200: u'Creative Labs ADPCM',
0x0202: u'Creative Labs Fastspeech8',
0x0203: u'Creative Labs Fastspeech10',
0x0220: u'Quarterdeck',
0x0300: u'Fujitsu FM Towns Snd',
0x0400: u'BTV Digital',
0x0680: u'VME VMPCM',
0x1000: u'Olivetti GSM',
0x1001: u'Olivetti ADPCM',
0x1002: u'Olivetti CELP',
0x1003: u'Olivetti SBC',
0x1004: u'Olivetti OPR',
0x1100: u'Lernout & Hauspie LH Codec',
0x1400: u'Norris',
0x1401: u'AT&T ISIAudio',
0x1500: u'Soundspace Music Compression',
0x2000: u'AC3',
0x7A21: u'GSM-AMR (CBR, no SID)',
0x7A22: u'GSM-AMR (VBR, including SID)',
0xFFFF: u'Development codec'
}
+
# Video fourcc -> human readable name.
# Note: the original table carried duplicate keys (CWLT, MRCA, MRLE, RT21,
# SVQ1, VIDS, YC12); only the effective (last) entry of each is kept.
video_fourcc_name = {
'3IV1': u'3ivx v1',
'3IV2': u'3ivx v2',
'AASC': u'Autodesk Animator',
'ABYR': u'Kensington ?ABYR?',
'AEMI': u'Array VideoONE MPEG1-I Capture',
'AFLC': u'Autodesk Animator FLC',
'AFLI': u'Autodesk Animator FLI',
'AMPG': u'Array VideoONE MPEG',
'ANIM': u'Intel RDX (ANIM)',
'AP41': u'AngelPotion Definitive',
'ASV1': u'Asus Video v1',
'ASV2': u'Asus Video v2',
'ASVX': u'Asus Video 2.0 (audio)',
'AUR2': u'Aura 2 Codec - YUV 4:2:2',
'AURA': u'Aura 1 Codec - YUV 4:1:1',
'BINK': u'RAD Game Tools Bink Video',
'BT20': u'Conexant Prosumer Video',
'BTCV': u'Conexant Composite Video Codec',
'BW10': u'Data Translation Broadway MPEG Capture',
'CC12': u'Intel YUV12',
'CDVC': u'Canopus DV',
'CFCC': u'Digital Processing Systems DPS Perception',
'CGDI': u'Microsoft Office 97 Camcorder Video',
'CHAM': u'Winnov Caviara Champagne',
'CJPG': u'Creative WebCam JPEG',
'CLJR': u'Cirrus Logic YUV 4 pixels',
'CMYK': u'Common Data Format in Printing',
'CPLA': u'Weitek 4:2:0 YUV Planar',
'CRAM': u'Microsoft Video 1 (CRAM)',
'CVID': u'Radius Cinepak',
'CWLT': u'Microsoft Color WLT DIB',
'CYUV': u'Creative Labs YUV',
'CYUY': u'ATI YUV',
'D261': u'H.261',
'D263': u'H.263',
'DIV3': u'DivX v3 MPEG-4 Low-Motion',
'DIV4': u'DivX v3 MPEG-4 Fast-Motion',
'DIV5': u'?DIV5?',
'DIVX': u'DivX v4',
'divx': u'DivX',
'DMB1': u'Matrox Rainbow Runner hardware MJPEG',
'DMB2': u'Paradigm MJPEG',
'DSVD': u'?DSVD?',
'DUCK': u'Duck True Motion 1.0',
'DVAN': u'?DVAN?',
'DVE2': u'InSoft DVE-2 Videoconferencing',
'dvsd': u'DV',
'DVSD': u'DV',
'DVX1': u'DVX1000SP Video Decoder',
'DVX2': u'DVX2000S Video Decoder',
'DVX3': u'DVX3000S Video Decoder',
'DX50': u'DivX v5',
'DXT1': u'Microsoft DirectX Compressed Texture (DXT1)',
'DXT2': u'Microsoft DirectX Compressed Texture (DXT2)',
'DXT3': u'Microsoft DirectX Compressed Texture (DXT3)',
'DXT4': u'Microsoft DirectX Compressed Texture (DXT4)',
'DXT5': u'Microsoft DirectX Compressed Texture (DXT5)',
'DXTC': u'Microsoft DirectX Compressed Texture (DXTC)',
'EKQ0': u'Elsa ?EKQ0?',
'ELK0': u'Elsa ?ELK0?',
'ESCP': u'Eidos Escape',
'ETV1': u'eTreppid Video ETV1',
'ETV2': u'eTreppid Video ETV2',
'ETVC': u'eTreppid Video ETVC',
'FLJP': u'D-Vision Field Encoded Motion JPEG',
'FRWA': u'SoftLab-Nsk Forward Motion JPEG w/ alpha channel',
'FRWD': u'SoftLab-Nsk Forward Motion JPEG',
'FVF1': u'Iterated Systems Fractal Video Frame',
'GLZW': u'Motion LZW (gabest@freemail.hu)',
'GPEG': u'Motion JPEG (gabest@freemail.hu)',
'GWLT': u'Microsoft Greyscale WLT DIB',
'H260': u'Intel ITU H.260 Videoconferencing',
'H261': u'Intel ITU H.261 Videoconferencing',
'H262': u'Intel ITU H.262 Videoconferencing',
'H263': u'Intel ITU H.263 Videoconferencing',
'H264': u'Intel ITU H.264 Videoconferencing',
'H265': u'Intel ITU H.265 Videoconferencing',
'H266': u'Intel ITU H.266 Videoconferencing',
'H267': u'Intel ITU H.267 Videoconferencing',
'H268': u'Intel ITU H.268 Videoconferencing',
'H269': u'Intel ITU H.269 Videoconferencing',
'HFYU': u'Huffman Lossless Codec',
'HMCR': u'Rendition Motion Compensation Format (HMCR)',
'HMRR': u'Rendition Motion Compensation Format (HMRR)',
'i263': u'Intel ITU H.263 Videoconferencing (i263)',
'I420': u'Intel Indeo 4',
'IAN ': u'Intel RDX',
'ICLB': u'InSoft CellB Videoconferencing',
'IGOR': u'Power DVD',
'IJPG': u'Intergraph JPEG',
'ILVC': u'Intel Layered Video',
'ILVR': u'ITU-T H.263+',
'IPDV': u'I-O Data Device Giga AVI DV Codec',
'IR21': u'Intel Indeo 2.1',
'IRAW': u'Intel YUV Uncompressed',
'IV30': u'Ligos Indeo 3.0',
'IV31': u'Ligos Indeo 3.1',
'IV32': u'Ligos Indeo 3.2',
'IV33': u'Ligos Indeo 3.3',
'IV34': u'Ligos Indeo 3.4',
'IV35': u'Ligos Indeo 3.5',
'IV36': u'Ligos Indeo 3.6',
'IV37': u'Ligos Indeo 3.7',
'IV38': u'Ligos Indeo 3.8',
'IV39': u'Ligos Indeo 3.9',
'IV40': u'Ligos Indeo Interactive 4.0',
'IV41': u'Ligos Indeo Interactive 4.1',
'IV42': u'Ligos Indeo Interactive 4.2',
'IV43': u'Ligos Indeo Interactive 4.3',
'IV44': u'Ligos Indeo Interactive 4.4',
'IV45': u'Ligos Indeo Interactive 4.5',
'IV46': u'Ligos Indeo Interactive 4.6',
'IV47': u'Ligos Indeo Interactive 4.7',
'IV48': u'Ligos Indeo Interactive 4.8',
'IV49': u'Ligos Indeo Interactive 4.9',
'IV50': u'Ligos Indeo Interactive 5.0',
'JBYR': u'Kensington ?JBYR?',
'JPEG': u'Still Image JPEG DIB',
'JPGL': u'Webcam JPEG Light?',
'KMVC': u'Karl Morton\'s Video Codec',
'LEAD': u'LEAD Video Codec',
'Ljpg': u'LEAD MJPEG Codec',
'M261': u'Microsoft H.261',
'M263': u'Microsoft H.263',
'M4S2': u'Microsoft MPEG-4 (M4S2)',
'm4s2': u'Microsoft MPEG-4 (m4s2)',
'MC12': u'ATI Motion Compensation Format (MC12)',
'MCAM': u'ATI Motion Compensation Format (MCAM)',
'MJ2C': u'Morgan Multimedia Motion JPEG2000',
'mJPG': u'IBM Motion JPEG w/ Huffman Tables',
'MJPG': u'Motion JPEG DIB',
'MP42': u'Microsoft MPEG-4 (low-motion)',
'MP43': u'Microsoft MPEG-4 (fast-motion)',
'MP4S': u'Microsoft MPEG-4 (MP4S)',
'mp4s': u'Microsoft MPEG-4 (mp4s)',
'MPEG': u'MPEG 1 Video I-Frame',
'MPG4': u'Microsoft MPEG-4 Video High Speed Compressor',
'MPGI': u'Sigma Designs MPEG',
'MRCA': u'Martin Regen Codec',
'MRLE': u'Run Length Encoding',
'MSVC': u'Microsoft Video 1',
'MTX1': u'Matrox ?MTX1?',
'MTX2': u'Matrox ?MTX2?',
'MTX3': u'Matrox ?MTX3?',
'MTX4': u'Matrox ?MTX4?',
'MTX5': u'Matrox ?MTX5?',
'MTX6': u'Matrox ?MTX6?',
'MTX7': u'Matrox ?MTX7?',
'MTX8': u'Matrox ?MTX8?',
'MTX9': u'Matrox ?MTX9?',
'MV12': u'?MV12?',
'MWV1': u'Aware Motion Wavelets',
'nAVI': u'?nAVI?',
'NTN1': u'Nogatech Video Compression 1',
'NVS0': u'nVidia GeForce Texture (NVS0)',
'NVS1': u'nVidia GeForce Texture (NVS1)',
'NVS2': u'nVidia GeForce Texture (NVS2)',
'NVS3': u'nVidia GeForce Texture (NVS3)',
'NVS4': u'nVidia GeForce Texture (NVS4)',
'NVS5': u'nVidia GeForce Texture (NVS5)',
'NVT0': u'nVidia GeForce Texture (NVT0)',
'NVT1': u'nVidia GeForce Texture (NVT1)',
'NVT2': u'nVidia GeForce Texture (NVT2)',
'NVT3': u'nVidia GeForce Texture (NVT3)',
'NVT4': u'nVidia GeForce Texture (NVT4)',
'NVT5': u'nVidia GeForce Texture (NVT5)',
'PDVC': u'I-O Data Device Digital Video Capture DV codec',
'PGVV': u'Radius Video Vision',
'PHMO': u'IBM Photomotion',
'PIM1': u'Pegasus Imaging ?PIM1?',
'PIM2': u'Pegasus Imaging ?PIM2?',
'PIMJ': u'Pegasus Imaging Lossless JPEG',
'PVEZ': u'Horizons Technology PowerEZ',
'PVMM': u'PacketVideo Corporation MPEG-4',
'PVW2': u'Pegasus Imaging Wavelet Compression',
'QPEG': u'Q-Team QPEG 1.0',
'qpeq': u'Q-Team QPEG 1.1',
'RGBT': u'Computer Concepts 32-bit support',
'RLE ': u'Microsoft Run Length Encoder',
'RLE4': u'Run Length Encoded 4',
'RLE8': u'Run Length Encoded 8',
'RT21': u'Intel Real Time Video 2.1',
'rv20': u'RealVideo G2',
'rv30': u'RealVideo 8',
'RVX ': u'Intel RDX (RVX )',
's422': u'Tekram VideoCap C210 YUV 4:2:2',
'SDCC': u'Sun Communication Digital Camera Codec',
'SFMC': u'CrystalNet Surface Fitting Method',
'SMSC': u'Radius SMSC',
'SMSD': u'Radius SMSD',
'smsv': u'WorldConnect Wavelet Video',
'SPIG': u'Radius Spigot',
'SPLC': u'Splash Studios ACM Audio Codec',
'SQZ2': u'Microsoft VXTreme Video Codec V2',
'STVA': u'ST CMOS Imager Data (Bayer)',
'STVB': u'ST CMOS Imager Data (Nudged Bayer)',
'STVC': u'ST CMOS Imager Data (Bunched)',
'STVX': u'ST CMOS Imager Data (Extended CODEC Data Format)',
'STVY': u'ST CMOS Imager Data (Extended CODEC Data Format with Correction Data)',
'SV10': u'Sorenson Video R1',
'SVQ1': u'Sorenson Video R3',
'TLMS': u'TeraLogic Motion Intraframe Codec (TLMS)',
'TLST': u'TeraLogic Motion Intraframe Codec (TLST)',
'TM20': u'Duck TrueMotion 2.0',
'TM2X': u'Duck TrueMotion 2X',
'TMIC': u'TeraLogic Motion Intraframe Codec (TMIC)',
'TMOT': u'Horizons Technology TrueMotion S',
'tmot': u'Horizons TrueMotion Video Compression',
'TR20': u'Duck TrueMotion RealTime 2.0',
'TSCC': u'TechSmith Screen Capture Codec',
'TV10': u'Tecomac Low-Bit Rate Codec',
'TY0N': u'Trident ?TY0N?',
'TY2C': u'Trident ?TY2C?',
'TY2N': u'Trident ?TY2N?',
'UCOD': u'eMajix.com ClearVideo',
'ULTI': u'IBM Ultimotion',
'UYVY': u'UYVY 4:2:2 byte ordering',
'V261': u'Lucent VX2000S',
'V422': u'24 bit YUV 4:2:2 Format',
'V655': u'16 bit YUV 4:2:2 Format',
'VCR1': u'ATI VCR 1.0',
'VCR2': u'ATI VCR 2.0',
'VCR3': u'ATI VCR 3.0',
'VCR4': u'ATI VCR 4.0',
'VCR5': u'ATI VCR 5.0',
'VCR6': u'ATI VCR 6.0',
'VCR7': u'ATI VCR 7.0',
'VCR8': u'ATI VCR 8.0',
'VCR9': u'ATI VCR 9.0',
'VDCT': u'Video Maker Pro DIB',
'VDOM': u'VDOnet VDOWave',
'VDOW': u'VDOnet VDOLive (H.263)',
'VDTZ': u'Darim Vison VideoTizer YUV',
'VGPX': u'VGPixel Codec',
'VIDS': u'YUV 4:2:2 CCIR 601 for V422',
'VIFP': u'?VIFP?',
'VIVO': u'Vivo H.263 v2.00',
'vivo': u'Vivo H.263',
'VIXL': u'Miro Video XL',
'VLV1': u'Videologic VLCAP.DRV',
'VP30': u'On2 VP3.0',
'VP31': u'On2 VP3.1',
'VX1K': u'VX1000S Video Codec',
'VX2K': u'VX2000S Video Codec',
'VXSP': u'VX1000SP Video Codec',
'WBVC': u'Winbond W9960',
'WHAM': u'Microsoft Video 1 (WHAM)',
'WINX': u'Winnov Software Compression',
'WJPG': u'AverMedia Winbond JPEG',
'WMV1': u'Windows Media Video V7',
'WMV2': u'Windows Media Video V8',
'WMV3': u'Windows Media Video V9',
'WNV1': u'Winnov Hardware Compression',
'x263': u'Xirlink H.263',
'XLV0': u'NetXL Video Decoder',
'XMPG': u'Xing MPEG (I-Frame only)',
'XVID': u'XviD MPEG-4',
'XXAN': u'?XXAN?',
'Y211': u'YUV 2:1:1 Packed',
'Y411': u'YUV 4:1:1 Packed',
'Y41B': u'YUV 4:1:1 Planar',
'Y41P': u'PC1 4:1:1',
'Y41T': u'PC1 4:1:1 with transparency',
'Y42B': u'YUV 4:2:2 Planar',
'Y42T': u'PCI 4:2:2 with transparency',
'Y8  ': u'Grayscale video',
'YC12': u'Intel YUV12 Codec',
'YUV8': u'Winnov Caviar YUV8',
'YUV9': u'Intel YUV9',
'YUY2': u'Uncompressed YUV 4:2:2',
'YUYV': u'Canopus YUV',
'YV12': u'YVU12 Planar',
'YVU9': u'Intel YVU9 Planar',
'YVYU': u'YVYU 4:2:2 byte ordering',
'ZLIB': u'?ZLIB?',
'ZPEG': u'Metheus Video Zipper'
}
+
diff --git a/lib/hachoir_parser/video/mov.py b/lib/hachoir_parser/video/mov.py
new file mode 100644
index 0000000000000000000000000000000000000000..f6b0a8ab6ff1af30bc9582efdd290cb5ee283999
--- /dev/null
+++ b/lib/hachoir_parser/video/mov.py
@@ -0,0 +1,246 @@
+"""
+Apple Quicktime Movie (file extension ".mov") parser.
+
+Documents:
+- Parsing and Writing QuickTime Files in Java (by Chris Adamson, 02/19/2003)
+  http://www.onjava.com/pub/a/onjava/2003/02/19/qt_file_format.html
+- QuickTime File Format (official technical reference)
+  http://developer.apple.com/documentation/QuickTime/QTFF/qtff.pdf
+- Apple QuickTime:
+  http://wiki.multimedia.cx/index.php?title=Apple_QuickTime
+- File type (ftyp):
+  http://www.ftyps.com/
+
+Author: Victor Stinner
+Creation: 2 august 2006
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (ParserError, FieldSet, MissingField,
+    UInt8, Int16, UInt16, UInt32, TimestampMac32,
+    String, PascalString8, CString,
+    RawBytes, PaddingBytes)
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal
+
class QTFloat32(FieldSet):
    """Quicktime 16.16 fixed-point number, exposed as a Python float."""
    static_size = 32

    def createFields(self):
        # Signed integer part, then the unsigned fractional part.
        yield Int16(self, "int_part")
        yield UInt16(self, "float_part")

    def createValue(self):
        fraction = self["float_part"].value / 65535.0
        return self["int_part"].value + fraction

    def createDescription(self):
        return str(self.value)
+
class AtomList(FieldSet):
    """Sequence of atoms filling the whole field set."""

    def createFields(self):
        # Parse atoms back to back until no data remains.
        while not self.eof:
            yield Atom(self, "atom[]")
+
class TrackHeader(FieldSet):
    """'tkhd' atom payload: flags, timing, layer and geometry of one track."""
    def createFields(self):
        yield textHandler(UInt8(self, "version"), hexadecimal)

        # TODO: sum of :
        # TrackEnabled = 1;
        # TrackInMovie = 2;
        # TrackInPreview = 4;
        # TrackInPoster = 8
        yield RawBytes(self, "flags", 3)

        yield TimestampMac32(self, "creation_date")
        yield TimestampMac32(self, "lastmod_date")
        yield UInt32(self, "track_id")
        yield PaddingBytes(self, "reserved[]", 8)
        yield UInt32(self, "duration")
        yield PaddingBytes(self, "reserved[]", 8)
        yield Int16(self, "video_layer", "Middle is 0, negative in front")
        yield PaddingBytes(self, "other", 2)
        # nine geometry values (presumably the 3x3 display matrix -- TODO confirm)
        yield QTFloat32(self, "geom_a", "Width scale")
        yield QTFloat32(self, "geom_b", "Width rotate")
        yield QTFloat32(self, "geom_u", "Width angle")
        yield QTFloat32(self, "geom_c", "Height rotate")
        yield QTFloat32(self, "geom_d", "Height scale")
        yield QTFloat32(self, "geom_v", "Height angle")
        yield QTFloat32(self, "geom_x", "Position X")
        yield QTFloat32(self, "geom_y", "Position Y")
        yield QTFloat32(self, "geom_w", "Divider scale")
        yield QTFloat32(self, "frame_size_width")
        yield QTFloat32(self, "frame_size_height")
+
class HDLR(FieldSet):
    """'hdlr' atom: declares the handler for the enclosing media.

    The trailing handler name is a C string for MPEG-4 files and a
    Pascal string for Quicktime files (selected via root.is_mpeg4).
    """
    def createFields(self):
        yield textHandler(UInt8(self, "version"), hexadecimal)
        yield RawBytes(self, "flags", 3)
        yield String(self, "subtype", 8)
        yield String(self, "manufacturer", 4)
        yield UInt32(self, "res_flags")
        yield UInt32(self, "res_flags_mask")
        if self.root.is_mpeg4:
            yield CString(self, "name")
        else:
            yield PascalString8(self, "name")
+
class MediaHeader(FieldSet):
    """'mdhd' atom: timing (time scale, duration), language and quality
    of one media."""
    def createFields(self):
        yield textHandler(UInt8(self, "version"), hexadecimal)
        yield RawBytes(self, "flags", 3)
        yield TimestampMac32(self, "creation_date")
        yield TimestampMac32(self, "lastmod_date")
        yield UInt32(self, "time_scale")
        yield UInt32(self, "duration") # presumably in time_scale units -- TODO confirm
        yield UInt16(self, "mac_lang")
        yield Int16(self, "quality")
+
class ELST(FieldSet):
    """'elst' atom: edit list.

    NOTE(review): only one edit entry (length/start/speed) is parsed here
    even when nb_edits is greater than one -- confirm against real files.
    """
    def createFields(self):
        yield textHandler(UInt8(self, "version"), hexadecimal)
        yield RawBytes(self, "flags", 3)
        yield UInt32(self, "nb_edits")
        yield UInt32(self, "length")
        yield UInt32(self, "start")
        yield QTFloat32(self, "playback_speed")
+
class Load(FieldSet):
    """'load' atom: track preload settings."""
    def createFields(self):
        yield UInt32(self, "start")
        yield UInt32(self, "length")
        yield UInt32(self, "flags") # PreloadAlways = 1 or TrackEnabledPreload = 2
        yield UInt32(self, "hints") # KeepInBuffer = 0x00000004; HighQuality = 0x00000100; SingleFieldVideo = 0x00100000
+
class MovieHeader(FieldSet):
    """'mvhd' atom: global timing, volume, display geometry and preview
    ranges for the whole movie."""
    def createFields(self):
        yield textHandler(UInt8(self, "version"), hexadecimal)
        yield RawBytes(self, "flags", 3)
        yield TimestampMac32(self, "creation_date")
        yield TimestampMac32(self, "lastmod_date")
        yield UInt32(self, "time_scale")
        yield UInt32(self, "duration")
        yield QTFloat32(self, "play_speed")
        yield UInt16(self, "volume")
        yield PaddingBytes(self, "reserved[]", 10)
        # nine geometry values (presumably the 3x3 display matrix -- TODO confirm)
        yield QTFloat32(self, "geom_a", "Width scale")
        yield QTFloat32(self, "geom_b", "Width rotate")
        yield QTFloat32(self, "geom_u", "Width angle")
        yield QTFloat32(self, "geom_c", "Height rotate")
        yield QTFloat32(self, "geom_d", "Height scale")
        yield QTFloat32(self, "geom_v", "Height angle")
        yield QTFloat32(self, "geom_x", "Position X")
        yield QTFloat32(self, "geom_y", "Position Y")
        yield QTFloat32(self, "geom_w", "Divider scale")
        yield UInt32(self, "preview_start")
        yield UInt32(self, "preview_length")
        yield UInt32(self, "still_poster")
        yield UInt32(self, "sel_start")
        yield UInt32(self, "sel_length")
        yield UInt32(self, "current_time")
        yield UInt32(self, "next_track")
+
class FileType(FieldSet):
    """'ftyp' atom: major brand, version and compatible brand list
    (read by MovFile.createMimeType to guess the MIME type)."""
    def createFields(self):
        yield String(self, "brand", 4, "Major brand")
        yield UInt32(self, "version", "Version")
        # the rest of the atom is a list of 4-byte compatible brands
        while not self.eof:
            yield String(self, "compat_brand[]", 4, "Compatible brand")
+
class Atom(FieldSet):
    """A Quicktime atom: 32-bit size, 4-character tag, then the payload.

    Known tags are parsed with a dedicated field set; unknown payloads
    are kept as raw bytes. A size of 0 means "up to the end of the
    enclosing container"; the 64-bit extended size (size == 1) is not
    supported yet.
    """
    tag_info = {
        # TODO: Use dictionary of dictionaries, like the Matroska parser does
        # ("elst" is a child of "edts", but not of "moov" for example)
        "moov": (AtomList, "movie", "Movie"),
        "trak": (AtomList, "track", "Track"),
        "mdia": (AtomList, "media", "Media"),
        "edts": (AtomList, "edts", ""),
        "minf": (AtomList, "minf", ""),
        "stbl": (AtomList, "stbl", ""),
        "dinf": (AtomList, "dinf", ""),
        "elst": (ELST, "edts", ""),
        "tkhd": (TrackHeader, "track_hdr", "Track header"),
        "hdlr": (HDLR, "hdlr", ""),
        "mdhd": (MediaHeader, "media_hdr", "Media header"),
        "load": (Load, "load", ""),
        "mvhd": (MovieHeader, "movie_hdr", "Movie header"),
        "ftyp": (FileType, "file_type", "File type"),
    }
    # Iterate the dict *values*: iterating the dict itself yields the tag
    # strings, so the previous "item[0]"/"item[1]" picked single characters
    # instead of the handler classes and descriptions.
    tag_handler = [ item[0] for item in tag_info.values() ]
    tag_desc = [ item[2] for item in tag_info.values() ]

    def createFields(self):
        yield UInt32(self, "size")
        yield String(self, "tag", 4)
        size = self["size"].value
        if size == 1:
            # A 64-bit "size64" field would follow the tag here.
            raise ParserError("Extended size is not supported!")
        elif size == 0:
            # Atom extends to the end of the enclosing field set
            # (floor division keeps the result an int under Python 3 too).
            if self._size is None:
                size = (self.parent.size - self.current_size) // 8 - 8
            else:
                size = (self.size - self.current_size) // 8
        else:
            # Declared size includes the 8-byte size+tag header.
            size = size - 8
        if 0 < size:
            tag = self["tag"].value
            if tag in self.tag_info:
                handler, name, desc = self.tag_info[tag]
                yield handler(self, name, desc, size=size*8)
            else:
                yield RawBytes(self, "data", size)

    def createDescription(self):
        return "Atom: %s" % self["tag"].value
+
class MovFile(Parser):
    """Apple Quicktime movie / ISO MPEG-4 container parser."""
    PARSER_TAGS = {
        "id": "mov",
        "category": "video",
        "file_ext": ("mov", "qt", "mp4", "m4v", "m4a", "m4p", "m4b"),
        "mime": (u"video/quicktime", u'video/mp4'),
        "min_size": 8*8,
        "magic": (("moov", 4*8),),
        "description": "Apple QuickTime movie"
    }
    BRANDS = {
        # File type brand => MIME type
        'mp41': u'video/mp4',
        'mp42': u'video/mp4',
    }
    endian = BIG_ENDIAN

    def __init__(self, *args, **kw):
        Parser.__init__(self, *args, **kw)
        # Read by HDLR to pick the handler name encoding (C string vs.
        # Pascal string); never set to True in this module -- TODO confirm
        # where MPEG-4 detection is supposed to happen.
        self.is_mpeg4 = False

    def validate(self):
        """Validate the first atom: sane size and a known top-level tag.

        Returns True on success, or an error message string (same
        convention as the other validate() methods in this package),
        instead of a bare False which gives no diagnostic.
        """
        size = self.stream.readBits(0, 32, self.endian)
        if size < 8:
            return "Invalid first atom size"
        tag = self.stream.readBytes(4*8, 4)
        if tag not in ("ftyp", "moov", "free"):
            return "Unknown first atom tag"
        return True

    def createFields(self):
        while not self.eof:
            yield Atom(self, "atom[]")

    def createMimeType(self):
        """Guess the MIME type from the 'ftyp' brands, or return None."""
        first = self[0]
        try:
            # Only a leading file type atom can refine the MIME type
            if first['tag'].value != "ftyp":
                return None
            file_type = first["file_type"]
            brand = file_type["brand"].value
            if brand in self.BRANDS:
                return self.BRANDS[brand]
            # Fall back on the compatible brand list
            for field in file_type.array("compat_brand"):
                brand = field.value
                if brand in self.BRANDS:
                    return self.BRANDS[brand]
        except MissingField:
            pass
        return None
+
diff --git a/lib/hachoir_parser/video/mpeg_ts.py b/lib/hachoir_parser/video/mpeg_ts.py
new file mode 100644
index 0000000000000000000000000000000000000000..56b9bc85e02a2abf790af0835b0f575797c400d6
--- /dev/null
+++ b/lib/hachoir_parser/video/mpeg_ts.py
@@ -0,0 +1,102 @@
+"""
+MPEG-2 Transport Stream parser.
+
+Documentation:
+- MPEG-2 Transmission
+  http://erg.abdn.ac.uk/research/future-net/digital-video/mpeg2-trans.html
+
+Author: Victor Stinner
+Creation date: 13 january 2007
+"""
+
+from hachoir_parser import Parser
+from hachoir_core.field import (FieldSet, ParserError, MissingField,
+    UInt8, Enum, Bit, Bits, RawBytes)
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_core.text_handler import textHandler, hexadecimal
+
class Packet(FieldSet):
    """One MPEG-2 transport stream packet: 188 bytes, or 204 bytes when
    followed by 16 bytes of error correction data."""
    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        # NOTE(review): relies on lazy field parsing -- "has_error" is
        # parsed on demand here, before createFields() has finished.
        if self["has_error"].value:
            self._size = 204*8
        else:
            self._size = 188*8

    # Known program identifier values; 0x0002..0x000f are reserved,
    # other values identify network/program/elementary streams.
    PID = {
        0x0000: "Program Association Table (PAT)",
        0x0001: "Conditional Access Table (CAT)",
        # 0x0002..0x000f: reserved
        # 0x0010..0x1FFE: network PID, program map PID, elementary PID, etc.
        # TODO: Check above values
        #0x0044: "video",
        #0x0045: "audio",
        0x1FFF: "Null packet",
    }

    def createFields(self):
        # NOTE(review): the literal 8 lands in UInt8's description
        # parameter -- confirm this was intended.
        yield textHandler(UInt8(self, "sync", 8), hexadecimal)
        if self["sync"].value != 0x47:
            raise ParserError("MPEG-2 TS: Invalid synchronization byte")
        yield Bit(self, "has_error")
        yield Bit(self, "payload_unit_start")
        yield Bit(self, "priority")
        yield Enum(textHandler(Bits(self, "pid", 13, "Program identifier"), hexadecimal), self.PID)
        yield Bits(self, "scrambling_control", 2)
        yield Bit(self, "has_adaptation")
        yield Bit(self, "has_payload")
        yield Bits(self, "counter", 4)
        yield RawBytes(self, "payload", 184)
        if self["has_error"].value:
            yield RawBytes(self, "error_correction", 16)

    def createDescription(self):
        text = "Packet: PID %s" % self["pid"].display
        if self["payload_unit_start"].value:
            text += ", start of payload"
        return text

    def isValid(self):
        """Return an error message (unicode) if the packet is implausible,
        or an empty string when it looks valid."""
        if not self["has_payload"].value and not self["has_adaptation"].value:
            return u"No payload and no adaptation"
        pid = self["pid"].value
        if (0x0002 <= pid <= 0x000f) or (0x2000 <= pid):
            return u"Invalid program identifier (%s)" % self["pid"].display
        return ""
+
class MPEG_TS(Parser):
    """MPEG-2 Transport Stream: fixed-size packets (188 or 204 bytes),
    each starting with the 0x47 synchronization byte."""
    PARSER_TAGS = {
        "id": "mpeg_ts",
        "category": "video",
        "file_ext": ("ts",),
        "min_size": 188*8,
        "description": u"MPEG-2 Transport Stream"
    }
    endian = BIG_ENDIAN

    def validate(self):
        """Sanity-check the first five packets; returns True or an error string."""
        # searchBytes returns a bit address, or None when not found
        sync = self.stream.searchBytes("\x47", 0, 204*8)
        if sync is None:
            return "Unable to find synchronization byte"
        for index in xrange(5):
            try:
                packet = self["packet[%u]" % index]
            except (ParserError, MissingField):
                # A truncated final packet is acceptable
                if index and self.eof:
                    return True
                else:
                    return "Unable to get packet #%u" % index
            err = packet.isValid()
            if err:
                return "Packet #%u is invalid: %s" % (index, err)
        return True

    def createFields(self):
        sync = self.stream.searchBytes("\x47", 0, 204*8)
        if sync is None:
            raise ParserError("Unable to find synchronization byte")
        elif sync:
            # Bytes preceding the first sync byte (sync is a bit address)
            yield RawBytes(self, "incomplete_packet", sync//8)
        while not self.eof:
            yield Packet(self, "packet[]")
+
diff --git a/lib/hachoir_parser/video/mpeg_video.py b/lib/hachoir_parser/video/mpeg_video.py
new file mode 100644
index 0000000000000000000000000000000000000000..5a5d51c823b7cb5969322a3976c61f7225c0848c
--- /dev/null
+++ b/lib/hachoir_parser/video/mpeg_video.py
@@ -0,0 +1,576 @@
+"""
+Moving Picture Experts Group (MPEG) video version 1 and 2 parser.
+
+Information:
+- http://www.mpucoder.com/DVD/
+- http://dvd.sourceforge.net/dvdinfo/
+- http://www.mit.jyu.fi/mweber/leffakone/software/parsempegts/
+- http://homepage.mac.com/rnc/EditMpegHeaderIFO.html
+- http://standards.iso.org/ittf/PubliclyAvailableStandards/c025029_ISO_IEC_TR_11172-5_1998(E)_Software_Simulation.zip
+    This is a sample encoder/decoder implementation for MPEG-1.
+
+Author: Victor Stinner
+Creation date: 15 september 2006
+"""
+
+from hachoir_parser import Parser
+from hachoir_parser.audio.mpeg_audio import MpegAudioFile
+from hachoir_core.field import (FieldSet,
+    FieldError, ParserError,
+    Bit, Bits, Bytes, RawBits, PaddingBits, NullBits,
+    UInt8, UInt16,
+    RawBytes, PaddingBytes,
+    Enum)
+from hachoir_core.endian import BIG_ENDIAN
+from hachoir_core.stream import StringInputStream
+from hachoir_core.text_handler import textHandler, hexadecimal
+
class FragmentGroup:
    """Collects the fragments of one elementary stream so that their raw
    payloads can be replayed as a single contiguous input stream."""
    def __init__(self, parser):
        self.items = []
        self.parser = parser
        self.args = {}

    def add(self, item):
        """Register one more fragment (kept in stream order)."""
        self.items.append(item)

    def createInputStream(self):
        # FIXME: Use lazy stream creation
        chunks = [item["rawdata"].value for item in self.items
                  if 'rawdata' in item]
        payload = "".join(chunks)
        # FIXME: Use smarter code to send arguments
        tags = {"class": self.parser, "args": self.args}.iteritems()
        return StringInputStream(payload, "<fragment group>", tags=tags)
+
class CustomFragment(FieldSet):
    """One fragment of a multiplexed stream; fragments sharing a group
    are reassembled into a single input stream for re-parsing."""
    def __init__(self, parent, name, size, parser, description=None, group=None):
        FieldSet.__init__(self, parent, name, description, size=size)
        # The first fragment of a stream creates the group, later ones join it.
        self.group = group if group else FragmentGroup(parser)
        self.group.add(self)

    def createFields(self):
        yield RawBytes(self, "rawdata", self.size//8)

    def _createInputStream(self, **args):
        return self.group.createInputStream()
+
class Timestamp(FieldSet):
    """33-bit MPEG timestamp stored as three chunks (3 + 15 + 15 bits),
    each followed by a marker bit."""
    static_size = 36

    def createFields(self):
        yield Bits(self, "c", 3)
        yield Bit(self, "sync[]") # always True
        yield Bits(self, "b", 15)
        yield Bit(self, "sync[]") # always True
        yield Bits(self, "a", 15)
        yield Bit(self, "sync[]") # always True

    def createValue(self):
        # The three chunks occupy disjoint bit ranges, so OR == ADD here.
        high = self["c"].value
        middle = self["b"].value
        low = self["a"].value
        return (high << 30) | (middle << 15) | low
+
class SCR(FieldSet):
    """System Clock Reference: clock value split in three chunks
    (3 + 15 + 15 bits) separated by marker bits."""
    static_size = 35

    def createFields(self):
        yield Bits(self, "scr_a", 3)
        yield Bit(self, "sync[]") # =True
        yield Bits(self, "scr_b", 15)
        yield Bit(self, "sync[]") # =True
        yield Bits(self, "scr_c", 15)
+
class PackHeader(FieldSet):
    """MPEG program stream pack header, MPEG-1 or MPEG-2 layout.

    The two layouts are told apart by the leading bits: MPEG-2 starts
    with the 2-bit value 1 ('01'), MPEG-1 with the 4-bit value 2 ('0010').
    """
    def createFields(self):
        if self.stream.readBits(self.absolute_address, 2, self.endian) == 1:
            # MPEG version 2
            yield Bits(self, "sync[]", 2)
            yield SCR(self, "scr")
            yield Bit(self, "sync[]")
            yield Bits(self, "scr_ext", 9)
            yield Bit(self, "sync[]")
            yield Bits(self, "mux_rate", 22)
            yield Bits(self, "sync[]", 2)
            yield PaddingBits(self, "reserved", 5, pattern=1)
            yield Bits(self, "stuffing_length", 3)
            count = self["stuffing_length"].value
            if count:
                yield PaddingBytes(self, "stuffing", count, pattern="\xff")
        else:
            # MPEG version 1
            yield Bits(self, "sync[]", 4)
            yield Bits(self, "scr_a", 3)
            yield Bit(self, "sync[]")
            yield Bits(self, "scr_b", 15)
            yield Bit(self, "sync[]")
            yield Bits(self, "scr_c", 15)
            yield Bits(self, "sync[]", 2)
            yield Bits(self, "mux_rate", 22)
            yield Bit(self, "sync[]")

    def validate(self):
        """Check marker/sync bits; returns True or an error string."""
        if self["mux_rate"].value == 0:
            return "Invalid mux rate"
        sync0 = self["sync[0]"]
        if (sync0.size == 2 and sync0.value == 1):
            # MPEG2 layout ('sync[0]' is 2 bits wide)
            # (a stray dead "pass" statement was removed here)
            if not self["sync[1]"].value \
            or not self["sync[2]"].value \
            or self["sync[3]"].value != 3:
                return "Invalid synchronisation bits"
        elif (sync0.size == 4 and sync0.value == 2):
            # MPEG1 layout ('sync[0]' is 4 bits wide)
            if not self["sync[1]"].value \
            or not self["sync[2]"].value \
            or self["sync[3]"].value != 3 \
            or not self["sync[4]"].value:
                return "Invalid synchronisation bits"
        else:
            return "Unknown version"
        return True
+
class SystemHeader(FieldSet):
    """MPEG program stream system header: global rate/stream bounds and
    lock flags; any bytes past the fixed 5-byte part are kept raw."""
    def createFields(self):
        yield Bits(self, "marker[]", 1)
        yield Bits(self, "rate_bound", 22)
        yield Bits(self, "marker[]", 1)
        yield Bits(self, "audio_bound", 6)
        yield Bit(self, "fixed_bitrate")
        yield Bit(self, "csps", description="Constrained system parameter stream")
        yield Bit(self, "audio_lock")
        yield Bit(self, "video_lock")
        yield Bits(self, "marker[]", 1)
        yield Bits(self, "video_bound", 5)
        # fixed fields above total 40 bits = 5 bytes
        length = self['../length'].value-5
        if length:
            yield RawBytes(self, "raw[]", length)
+
class defaultParser(FieldSet):
    """Fallback chunk payload parser: keep the whole content as raw bytes."""
    def createFields(self):
        length = self["../length"].value
        yield RawBytes(self, "data", length)
+
class Padding(FieldSet):
    """Padding chunk payload: the whole content is stuffing bytes."""
    def createFields(self):
        length = self["../length"].value
        yield PaddingBytes(self, "data", length)
+
class VideoExtension2(FieldSet):
    """Second PES extension field: marker bit, a 7-bit length, and that
    many raw extension bytes."""
    def createFields(self):
        yield Bit(self, "sync[]") # =True
        yield Bits(self, "ext_length", 7)
        yield NullBits(self, "reserved[]", 8)
        count = self["ext_length"].value
        if count:
            yield RawBytes(self, "ext_bytes", count)
+
class VideoExtension1(FieldSet):
    """First PES extension field: flag bits followed by the optional
    sub-fields each flag announces."""
    def createFields(self):
        yield Bit(self, "has_private")
        yield Bit(self, "has_pack_lgth")
        yield Bit(self, "has_pack_seq")
        yield Bit(self, "has_pstd_buffer")
        yield Bits(self, "sync[]", 3) # =7
        yield Bit(self, "has_extension2")

        if self["has_private"].value:
            yield RawBytes(self, "private", 16)

        if self["has_pack_lgth"].value:
            yield UInt8(self, "pack_lgth")

        if self["has_pack_seq"].value:
            yield Bit(self, "sync[]") # =True
            yield Bits(self, "pack_seq_counter", 7)
            yield Bit(self, "sync[]") # =True
            yield Bit(self, "mpeg12_id")
            yield Bits(self, "orig_stuffing_length", 6)

        if self["has_pstd_buffer"].value:
            yield Bits(self, "sync[]", 2) # =1
            yield Enum(Bit(self, "pstd_buffer_scale"),
                {True: "128 bytes", False: "1024 bytes"})
            yield Bits(self, "pstd_size", 13)
+
class VideoSeqHeader(FieldSet):
    """MPEG video sequence header: frame geometry, aspect ratio and frame
    rate codes, bit rate, and optional quantizer matrices."""
    # Pixel aspect ratio codes (4 bits)
    ASPECT=["forbidden", "1.0000 (VGA etc.)", "0.6735",
            "0.7031 (16:9, 625line)", "0.7615", "0.8055",
            "0.8437 (16:9, 525line)", "0.8935",
            "0.9157 (CCIR601, 625line)", "0.9815", "1.0255", "1.0695",
            "1.0950 (CCIR601, 525line)", "1.1575", "1.2015", "reserved"]
    # Frame rate codes (4 bits)
    FRAMERATE=["forbidden", "23.976 fps", "24 fps", "25 fps", "29.97 fps",
               "30 fps", "50 fps", "59.94 fps", "60 fps"]
    def createFields(self):
        yield Bits(self, "width", 12)
        yield Bits(self, "height", 12)
        yield Enum(Bits(self, "aspect", 4), self.ASPECT)
        yield Enum(Bits(self, "frame_rate", 4), self.FRAMERATE)
        yield Bits(self, "bit_rate", 18, "Bit rate in units of 50 bytes")
        yield Bits(self, "sync[]", 1) # =1
        yield Bits(self, "vbv_size", 10, "Video buffer verifier size, in units of 16768")
        yield Bit(self, "constrained_params_flag")
        # optional 64-entry quantizer matrices (8 bits per entry)
        yield Bit(self, "has_intra_quantizer")
        if self["has_intra_quantizer"].value:
            for i in range(64):
                yield Bits(self, "intra_quantizer[]", 8)
        yield Bit(self, "has_non_intra_quantizer")
        if self["has_non_intra_quantizer"].value:
            for i in range(64):
                yield Bits(self, "non_intra_quantizer[]", 8)
+
class GroupStart(FieldSet):
    """Group of pictures (GOP) start: timecode fields and group flags."""
    def createFields(self):
        yield Bit(self, "drop_frame")
        yield Bits(self, "time_hh", 5)
        yield Bits(self, "time_mm", 6)
        yield PaddingBits(self, "time_pad[]", 1)
        yield Bits(self, "time_ss", 6)
        yield Bits(self, "time_ff", 6)
        yield Bit(self, "closed_group")
        yield Bit(self, "broken_group")
        yield PaddingBits(self, "pad[]", 5)
+
class PacketElement(FieldSet):
    """MPEG-2 PES packet header: flag bits followed by the optional
    timestamps, clock references, CRC and extensions they announce."""
    def createFields(self):
        yield Bits(self, "sync[]", 2) # =2
        if self["sync[0]"].value != 2:
            raise ParserError("Unknown video elementary data")
        yield Bits(self, "is_scrambled", 2)
        yield Bits(self, "priority", 1)
        yield Bit(self, "alignment")
        yield Bit(self, "is_copyrighted")
        yield Bit(self, "is_original")
        yield Bit(self, "has_pts", "Presentation Time Stamp")
        yield Bit(self, "has_dts", "Decode Time Stamp")
        yield Bit(self, "has_escr", "Elementary Stream Clock Reference")
        yield Bit(self, "has_es_rate", "Elementary Stream rate")
        yield Bit(self, "dsm_trick_mode")
        yield Bit(self, "has_copy_info")
        yield Bit(self, "has_prev_crc", "If True, previous PES packet CRC follows")
        yield Bit(self, "has_extension")
        yield UInt8(self, "size")

        # Time stamps
        if self["has_pts"].value:
            yield Bits(self, "sync[]", 4) # =2, or 3 if has_dts=True
            yield Timestamp(self, "pts")
        if self["has_dts"].value:
            # a DTS is only valid together with a PTS
            if not(self["has_pts"].value):
                raise ParserError("Invalid PTS/DTS values")
            yield Bits(self, "sync[]", 4) # =1
            yield Timestamp(self, "dts")

        if self["has_escr"].value:
            yield Bits(self, "sync[]", 2) # =0
            yield SCR(self, "escr")

        if self["has_es_rate"].value:
            yield Bit(self, "sync[]") # =True
            yield Bits(self, "es_rate", 14) # in units of 50 bytes/second
            yield Bit(self, "sync[]") # =True

        if self["has_copy_info"].value:
            yield Bit(self, "sync[]") # =True
            yield Bits(self, "copy_info", 7)

        if self["has_prev_crc"].value:
            yield textHandler(UInt16(self, "prev_crc"), hexadecimal)

        # --- Extension ---
        if self["has_extension"].value:
            yield VideoExtension1(self, "extension")
            if self["extension/has_extension2"].value:
                yield VideoExtension2(self, "extension2")
+
class VideoExtension(FieldSet):
    """MPEG-2 extension start code payload: sequence, sequence display,
    or picture coding extension, selected by the 4-bit extension type."""
    EXT_TYPE = {1:'Sequence',2:'Sequence Display',8:'Picture Coding'}
    def createFields(self):
        yield Enum(Bits(self, "ext_type", 4), self.EXT_TYPE)
        ext_type=self['ext_type'].value
        if ext_type==1:
            # Sequence extension
            yield Bits(self, 'profile_and_level', 8)
            yield Bit(self, 'progressive_sequence')
            yield Bits(self, 'chroma_format', 2)
            yield Bits(self, 'horiz_size_ext', 2)
            yield Bits(self, 'vert_size_ext', 2)
            yield Bits(self, 'bit_rate_ext', 12)
            yield Bits(self, 'pad[]', 1)
            yield Bits(self, 'vbv_buffer_size_ext', 8)
            yield Bit(self, 'low_delay')
            yield Bits(self, 'frame_rate_ext_n', 2)
            yield Bits(self, 'frame_rate_ext_d', 5)
        elif ext_type==2:
            # Sequence Display extension
            yield Bits(self, 'video_format', 3)
            yield Bit(self, 'color_desc_present')
            if self['color_desc_present'].value:
                yield UInt8(self, 'color_primaries')
                yield UInt8(self, 'transfer_characteristics')
                yield UInt8(self, 'matrix_coeffs')
            yield Bits(self, 'display_horiz_size', 14)
            yield Bits(self, 'pad[]', 1)
            yield Bits(self, 'display_vert_size', 14)
            yield NullBits(self, 'pad[]', 3)
        elif ext_type==8:
            # Picture Coding extension
            yield Bits(self, 'f_code[0][0]', 4, description="forward horizontal")
            yield Bits(self, 'f_code[0][1]', 4, description="forward vertical")
            yield Bits(self, 'f_code[1][0]', 4, description="backward horizontal")
            yield Bits(self, 'f_code[1][1]', 4, description="backward vertical")
            yield Bits(self, 'intra_dc_precision', 2)
            yield Bits(self, 'picture_structure', 2)
            yield Bit(self, 'top_field_first')
            yield Bit(self, 'frame_pred_frame_dct')
            yield Bit(self, 'concealment_motion_vectors')
            yield Bit(self, 'q_scale_type')
            yield Bit(self, 'intra_vlc_format')
            yield Bit(self, 'alternate_scan')
            yield Bit(self, 'repeat_first_field')
            yield Bit(self, 'chroma_420_type')
            yield Bit(self, 'progressive_frame')
            yield Bit(self, 'composite_display')
            if self['composite_display'].value:
                yield Bit(self, 'v_axis')
                yield Bits(self, 'field_sequence', 3)
                yield Bit(self, 'sub_carrier')
                yield Bits(self, 'burst_amplitude', 7)
                yield Bits(self, 'sub_carrier_phase', 8)
                yield NullBits(self, 'pad[]', 2)
            else:
                yield NullBits(self, 'pad[]', 6)
        else:
            # unknown extension type: keep the remaining nibble raw
            yield RawBits(self, "raw[]", 4)
+
class VideoPicture(FieldSet):
    """Picture header: temporal reference, coding type, VBV delay, and
    motion vector precision for P/B pictures; padded to a byte boundary."""
    CODING_TYPE = ["forbidden","intra-coded (I)",
                   "predictive-coded (P)",
                   "bidirectionally-predictive-coded (B)",
                   "dc intra-coded (D)", "reserved",
                   "reserved", "reserved"]
    def createFields(self):
        yield Bits(self, "temporal_ref", 10)
        yield Enum(Bits(self, "coding_type", 3), self.CODING_TYPE)
        yield Bits(self, "vbv_delay", 16)
        if self['coding_type'].value in (2,3):
            # predictive coding
            yield Bit(self, 'full_pel_fwd_vector')
            yield Bits(self, 'forward_f_code', 3)
        if self['coding_type'].value == 3:
            # bidi predictive coding
            yield Bit(self, 'full_pel_back_vector')
            yield Bits(self, 'backward_f_code', 3)
        # align the field set to the next byte boundary
        yield Bits(self, "padding", 8-(self.current_size % 8))
+
class VideoSlice(FieldSet):
    """One picture slice: quantizer scale followed by entropy-coded data
    up to the next start code (searched at most 1 MB ahead)."""
    def createFields(self):
        yield Bits(self, "quantizer_scale", 5)
        # NOTE(review): the search starts 3 bits past the current position
        # and those 3 bits are added back to the data size below -- confirm
        # the intended alignment.
        start=self.absolute_address+self.current_size+3
        pos=self.stream.searchBytes('\0\0\1',start,start+1024*1024*8) # seek forward by at most 1MB
        if pos is None: pos=self.root.size
        yield RawBits(self, "data", pos-start+3)
+
class VideoChunk(FieldSet):
    """One start-code-delimited chunk of an MPEG video elementary stream."""
    # start code => (field name, payload parser, description)
    tag_info = {
        0x00: ("pict_start[]",   VideoPicture,  "Picture start"),
        0xB2: ("data_start[]",   None,          "Data start"),
        0xB3: ("seq_hdr[]",      VideoSeqHeader,"Sequence header"),
        0xB4: ("seq_err[]",      None,          "Sequence error"),
        0xB5: ("ext_start[]",    VideoExtension,"Extension start"),
        0xB7: ("seq_end[]",      None,          "Sequence end"),
        0xB8: ("group_start[]",  GroupStart,    "Group start"),
    }

    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        # Reads self["tag"] during __init__ (parsed on demand) to pick the
        # field name, description and payload parser from the start code.
        # NOTE(review): defaultParser reads "../length", which VideoChunk
        # never defines -- confirm unknown tags can actually be parsed here.
        tag = self["tag"].value
        if tag in self.tag_info:
            self._name, self.parser, self._description = self.tag_info[tag]
            if not self.parser:
                self.parser = defaultParser
        elif 0x01 <= tag <= 0xaf:
            self._name, self.parser, self._description = ('slice[]', VideoSlice, 'Picture slice')
        else:
            self.parser = defaultParser

    def createFields(self):
        yield Bytes(self, "sync", 3)
        yield textHandler(UInt8(self, "tag"), hexadecimal)
        # 0xB7 (sequence end) has no payload
        if self.parser and self['tag'].value != 0xb7:
            yield self.parser(self, "content")
+
class VideoStream(Parser):
    """Reassembled MPEG video elementary stream: a sequence of start-code
    chunks, possibly separated by padding bytes."""
    endian = BIG_ENDIAN
    def createFields(self):
        while self.current_size < self.size:
            # find the next 00 00 01 start code (bit address, or None)
            pos=self.stream.searchBytes('\0\0\1',self.current_size,self.current_size+1024*1024*8) # seek forward by at most 1MB
            if pos is not None:
                padsize = pos-self.current_size
                if padsize:
                    yield PaddingBytes(self, "pad[]", padsize//8)
            yield VideoChunk(self, "chunk[]")
+
class Stream(FieldSet):
    """Payload of one program stream packet (audio/video/private).

    Skips stuffing bytes (0xff), decodes either the MPEG-1 style header
    (STD buffer size and/or PTS/DTS) or delegates to PacketElement for
    the MPEG-2 PES header, then wraps the remaining payload in a
    CustomFragment so all packets of one stream share a FragmentGroup.
    """
    def createFields(self):
        padding=0
        position=0
        while True:
            # peek one byte ahead without consuming it
            next=ord(self.parent.stream.readBytes(self.absolute_address+self.current_size+position, 1))
            if next == 0xff:
                padding+=1
                position+=8
            elif padding:
                yield PaddingBytes(self, "pad[]", padding)
                # NOTE(review): padding becomes None here; another 0xff byte
                # afterwards would crash on "padding+=1" -- confirm that a
                # stuffing run cannot resume after the flush.
                padding=None
                position=0
            elif 0x40 <= next <= 0x7f:
                # MPEG-1 STD buffer size field
                yield Bits(self, "scale_marker", 2) # 1
                yield Bit(self, "scale")
                scale=self['scale'].value
                if scale:
                    scaleval=1024
                else:
                    scaleval=128
                yield textHandler(Bits(self, "size", 13), lambda field:str(field.value*scaleval))
            elif 0x00 <= next <= 0x3f:
                # MPEG-1 PTS/DTS field
                yield Bits(self, "ts_marker", 2) # 0
                yield Bit(self, "has_pts")
                yield Bit(self, "has_dts")
                if self['has_pts'].value:
                    yield Timestamp(self, "pts")
                if self['has_dts'].value:
                    yield PaddingBits(self, "pad[]", 4)
                    yield Timestamp(self, "dts")
                # realign to a byte boundary if needed
                if self.current_size % 8 == 4:
                    yield PaddingBits(self, "pad[]", 4)
                break
            elif 0x80 <= next <= 0xbf:
                # MPEG-2 extension
                yield PacketElement(self, "pkt")
                break
            else:
                # 0xc0 - 0xfe: unknown
                break
        # remaining payload: group it with the other packets of this stream
        length = self["../length"].value - self.current_size//8
        if length:
            tag=self['../tag'].value
            group=self.root.streamgroups[tag]
            parname=self.parent._name
            if parname.startswith('audio'):
                frag = CustomFragment(self, "data", length*8, MpegAudioFile, group=group)
            elif parname.startswith('video'):
                frag = CustomFragment(self, "data", length*8, VideoStream, group=group)
            else:
                frag = CustomFragment(self, "data", length*8, None, group=group)
            self.root.streamgroups[tag]=frag.group
            yield frag
+
class Chunk(FieldSet):
    """One start-code-delimited chunk of an MPEG program stream."""

    ISO_END_CODE = 0xB9
    # tag -> (field name, content parser, human-readable description)
    tag_info = {
        0xB9: ("end",            None,          "End"),
        0xBA: ("pack_start[]",   PackHeader,    "Pack start"),
        0xBB: ("system_start[]", SystemHeader,  "System start"),
        # streams
        0xBD: ("private[]",      Stream,        "Private elementary"),
        0xBE: ("padding[]",      Stream,        "Padding"),
        # 0xC0 to 0xFE handled specially
        0xFF: ("directory[]",    Stream,        "Program Stream Directory"),
    }

    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        # Lazily create the per-stream fragment group table on the root
        # parser the first time any chunk is constructed.
        root = self.root
        if not hasattr(root, 'streamgroups'):
            root.streamgroups = dict((stream_tag, None)
                                     for stream_tag in range(0xBC, 0x100))
        tag = self["tag"].value
        info = self.tag_info.get(tag)
        if info is not None:
            self._name, self.parser, self._description = info
        elif 0xBC <= tag <= 0xFF:
            if 0xC0 <= tag < 0xE0:
                # audio stream packet
                stream_id = tag - 0xC0
                self._name = "audio[%i][]" % stream_id
                self.parser = Stream
                self._description = "Audio Stream %i Packet" % stream_id
            elif 0xE0 <= tag < 0xF0:
                # video stream packet
                stream_id = tag - 0xE0
                self._name = "video[%i][]" % stream_id
                self.parser = Stream
                self._description = "Video Stream %i Packet" % stream_id
            else:
                self._name = "stream[]"
                self.parser = Stream
                self._description = "Data Stream Packet"
        else:
            self.parser = defaultParser

        if not self.parser:
            self.parser = defaultParser
        elif self.parser != PackHeader and "length" in self:
            # total chunk size: 3-byte sync + tag + 2-byte length + payload
            self._size = (6 + self["length"].value) * 8

    def createFields(self):
        yield Bytes(self, "sync", 3)
        yield textHandler(UInt8(self, "tag"), hexadecimal)
        if not self.parser:
            return
        if self.parser != PackHeader:
            yield UInt16(self, "length")
            if not self["length"].value:
                return
        yield self.parser(self, "content")

    def createDescription(self):
        return "Chunk: tag %s" % self["tag"].display
+
class MPEGVideoFile(Parser):
    """Parser for MPEG version 1 or 2 video files (program streams)."""

    PARSER_TAGS = {
        "id": "mpeg_video",
        "category": "video",
        "file_ext": ("mpeg", "mpg", "mpe", "vob"),
        "mime": (u"video/mpeg", u"video/mp2p"),
        "min_size": 12*8,
#TODO:        "magic": xxx,
        "description": "MPEG video, version 1 or 2"
    }
    endian = BIG_ENDIAN
    # cached MPEG version (1 or 2), computed lazily by getVersion()
    version = None

    def createFields(self):
        while self.current_size < self.size:
            # resynchronize on the next 0x000001 start code,
            # seeking forward by at most 1MB
            pos = self.stream.searchBytes('\0\0\1', self.current_size,
                                          self.current_size + 1024*1024*8)
            if pos is not None:
                padsize = pos - self.current_size
                if padsize:
                    yield PaddingBytes(self, "pad[]", padsize // 8)
            chunk = Chunk(self, "chunk[]")
            try:
                # force chunk to be processed, so that CustomFragments are complete
                chunk['content/data']
            except Exception:
                # BUG FIX: was a bare `except:` which also swallowed
                # KeyboardInterrupt/SystemExit; chunks without a data
                # payload are still yielded (deliberate best effort).
                pass
            yield chunk

    def validate(self):
        """Return True if the stream looks like an MPEG program stream,
        or an error message string otherwise."""
        try:
            pack = self[0]
        except FieldError:
            return "Unable to create first chunk"
        if pack.name != "pack_start[0]":
            return "Invalid first chunk"
        if pack["sync"].value != "\0\0\1":
            return "Invalid synchronisation"
        return pack["content"].validate()

    def getVersion(self):
        """Return the MPEG version (1 or 2), inferring it from the
        pack header layout on first call and caching the result."""
        if not self.version:
            if self["pack_start[0]/content/sync[0]"].size == 2:
                self.version = 2
            else:
                self.version = 1
        return self.version

    def createDescription(self):
        if self.getVersion() == 2:
            return "MPEG-2 video"
        else:
            return "MPEG-1 video"
diff --git a/sickbeard/image_cache.py b/sickbeard/image_cache.py
new file mode 100644
index 0000000000000000000000000000000000000000..84e90c3418bebd676ade1e2d69717895e1325324
--- /dev/null
+++ b/sickbeard/image_cache.py
@@ -0,0 +1,139 @@
+# Author: Nic Wolfe <nic@wolfeden.ca>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.
+
+import os.path
+import sys
+
+sys.path.append(os.path.abspath('lib'))
+
+import sickbeard
+
+from sickbeard import helpers, logger
+from sickbeard import encodingKludge as ek
+from sickbeard.metadata.generic import GenericMetadata
+
+from lib.hachoir_parser import createParser
+from lib.hachoir_metadata import extractMetadata
+
class ImageCache:
    """On-disk cache of show poster/banner images under CACHE_DIR/images.

    Images are keyed by TVDB show id and cached as
    ``<tvdb_id>.poster.jpg`` / ``<tvdb_id>.banner.jpg``.
    """

    # image type constants used throughout this class
    BANNER = 1
    POSTER = 2

    def __init__(self):
        pass

    def _cache_dir(self):
        """Return the absolute path of the image cache directory."""
        return ek.ek(os.path.abspath, ek.ek(os.path.join, sickbeard.CACHE_DIR, 'images'))

    def poster_path(self, tvdb_id):
        """Return the cache path of the poster for the given show id."""
        poster_file_name = str(tvdb_id) + '.poster.jpg'
        return ek.ek(os.path.join, self._cache_dir(), poster_file_name)

    def banner_path(self, tvdb_id):
        """Return the cache path of the banner for the given show id."""
        banner_file_name = str(tvdb_id) + '.banner.jpg'
        return ek.ek(os.path.join, self._cache_dir(), banner_file_name)

    def has_poster(self, tvdb_id):
        """True if a cached poster exists for the given show id."""
        return ek.ek(os.path.isfile, self.poster_path(tvdb_id))

    def has_banner(self, tvdb_id):
        """True if a cached banner exists for the given show id."""
        return ek.ek(os.path.isfile, self.banner_path(tvdb_id))

    def which_type(self, path):
        """Guess the image type at *path* from its aspect ratio.

        Returns self.POSTER, self.BANNER, or None when the file is missing,
        unreadable, or has an unrecognized ratio.
        """
        if not ek.ek(os.path.isfile, path):
            return None
        img_parser = createParser(path)
        img_metadata = extractMetadata(img_parser)
        # BUG FIX: extractMetadata() returns None for unreadable/corrupt
        # images; the old code then crashed with AttributeError on .get().
        if not img_metadata:
            logger.log(u"Unable to read image metadata from "+str(path)+", can't determine its type", logger.WARNING)
            return None
        img_ratio = float(img_metadata.get('width'))/float(img_metadata.get('height'))

        # posters are ~2:3 portrait, banners are very wide (~758x140)
        if 0.55 < img_ratio < 0.8:
            return self.POSTER
        elif 5 < img_ratio < 6:
            return self.BANNER
        else:
            logger.log(u"Image has size ratio of "+str(img_ratio)+", unknown type", logger.WARNING)
            return None

    def _cache_image_from_file(self, image_path, img_type, tvdb_id):
        """Copy a local image file into the cache. Returns True on success."""
        if img_type == self.POSTER:
            dest_path = self.poster_path(tvdb_id)
        elif img_type == self.BANNER:
            dest_path = self.banner_path(tvdb_id)
        else:
            logger.log(u"Invalid cache image type: "+str(img_type), logger.ERROR)
            return False

        # make sure the cache folder exists before copying into it
        if not ek.ek(os.path.isdir, self._cache_dir()):
            logger.log(u"Image cache dir didn't exist, creating it at "+str(self._cache_dir()))
            ek.ek(os.makedirs, self._cache_dir())

        logger.log(u"Copying from "+image_path+" to "+dest_path)
        helpers.copyFile(image_path, dest_path)

        return True

    def _cache_image_from_tvdb(self, show_obj, img_type):
        """Download an image of the given type from TVDB into the cache.

        Returns the (truthy on success) result of the metadata writer.
        """
        if img_type == self.POSTER:
            img_type_name = 'poster'
            dest_path = self.poster_path(show_obj.tvdbid)
        elif img_type == self.BANNER:
            img_type_name = 'banner'
            dest_path = self.banner_path(show_obj.tvdbid)
        else:
            logger.log(u"Invalid cache image type: "+str(img_type), logger.ERROR)
            return False

        #TODO: refactor
        metadata_generator = GenericMetadata()
        img_data = metadata_generator._retrieve_show_image(img_type_name, show_obj)
        result = metadata_generator._write_image(img_data, dest_path)

        return result

    def fill_cache(self, show_obj):
        """Ensure poster and banner are cached for *show_obj*, first from
        images already in the show dir, then by downloading from TVDB."""

        logger.log(u"Checking if we need any cache images for show "+str(show_obj.tvdbid), logger.DEBUG)

        # check if the images are already cached or not
        need_images = {self.POSTER: not self.has_poster(show_obj.tvdbid),
                       self.BANNER: not self.has_banner(show_obj.tvdbid),
                       }

        if not need_images[self.POSTER] and not need_images[self.BANNER]:
            return

        # check the show dir for images and use them
        for cur_provider in sickbeard.metadata_provider_dict.values():
            if ek.ek(os.path.isfile, cur_provider.get_poster_path(show_obj)):
                cur_file_name = os.path.abspath(cur_provider.get_poster_path(show_obj))
                cur_file_type = self.which_type(cur_file_name)

                # BUG FIX: which_type() may return None for unknown ratios;
                # the old code then raised KeyError on need_images[None].
                if cur_file_type is None:
                    continue

                logger.log(u"Checking if image "+cur_file_name+" (type "+str(cur_file_type)+") needs metadata: "+str(need_images[cur_file_type]), logger.DEBUG)

                if cur_file_type in need_images and need_images[cur_file_type]:
                    logger.log(u"Found an image in the show dir that doesn't exist in the cache, caching it: "+str(cur_file_name)+", type "+str(cur_file_type), logger.DEBUG)
                    self._cache_image_from_file(cur_file_name, cur_file_type, show_obj.tvdbid)
                    need_images[cur_file_type] = False

        # download from TVDB for missing ones
        for cur_image_type in [self.POSTER, self.BANNER]:
            logger.log(u"Seeing if we still need an image of type "+str(cur_image_type)+": "+str(need_images[cur_image_type]), logger.DEBUG)
            if cur_image_type in need_images and need_images[cur_image_type]:
                self._cache_image_from_tvdb(show_obj, cur_image_type)
diff --git a/sickbeard/metadata/generic.py b/sickbeard/metadata/generic.py
index abffc828a7161d242c5af5bf8c9d5abec0c7b52c..6bb908741c45080337b9249ac9e667d5647b0a5c 100644
--- a/sickbeard/metadata/generic.py
+++ b/sickbeard/metadata/generic.py
@@ -505,7 +505,7 @@ class GenericMetadata():
             logger.log(u"Unable to look up show on TVDB, not downloading images: "+str(e).decode('utf-8'), logger.ERROR)
             return None
     
-        if image_type not in ('fanart', 'poster'):
+        if image_type not in ('fanart', 'poster', 'banner'):
             logger.log(u"Invalid image type "+str(image_type)+", couldn't find it in the TVDB object", logger.ERROR)
             return None
     
diff --git a/sickbeard/show_queue.py b/sickbeard/show_queue.py
index 58196e084220c98bcd084c9583675ee9cbbbd91c..db8be99fd71fe5c7ad17d4b8bc7d121bc9d08df3 100644
--- a/sickbeard/show_queue.py
+++ b/sickbeard/show_queue.py
@@ -275,6 +275,7 @@ class QueueItemRefresh(ShowQueueItem):
 
         self.show.refreshDir()
         self.show.writeMetadata()
+        self.show.populateCache()
 
         self.inProgress = False
 
diff --git a/sickbeard/tv.py b/sickbeard/tv.py
index 8eaca7d87f4c2af96dcd1b5b64d0be6b73a66a99..22e4c8dc080878c8684595eef28e1c8e10c78bc4 100644
--- a/sickbeard/tv.py
+++ b/sickbeard/tv.py
@@ -21,7 +21,6 @@ from __future__ import with_statement
 import os.path
 import datetime
 import threading
-import urllib
 import re
 import glob
 
@@ -35,9 +34,9 @@ from lib.tvdb_api import tvdb_api, tvdb_exceptions
 
 from sickbeard import db
 from sickbeard import helpers, exceptions, logger
-from sickbeard import processTV
 from sickbeard import tvrage
 from sickbeard import config
+from sickbeard import image_cache
 
 from sickbeard import encodingKludge as ek
 
@@ -625,6 +624,12 @@ class TVShow(object):
         # remove self from show list
         sickbeard.showList = [x for x in sickbeard.showList if x.tvdbid != self.tvdbid]
 
+    def populateCache(self):
+        cache_inst = image_cache.ImageCache()
+        
+        logger.log(u"Checking & filling cache for show "+self.name)
+        cache_inst.fill_cache(self)
+
     def refreshDir(self):
 
         # make sure the show dir is where we think it is
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index de6485e8cada11648f80f500af6905c51c3751e2..d46b23317657052ccfe39c7f797569e32111d54b 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -39,6 +39,7 @@ from sickbeard import tv, versionChecker, ui
 from sickbeard import logger, helpers, exceptions, classes, db
 from sickbeard import encodingKludge as ek
 from sickbeard import search_queue
+from sickbeard import image_cache
 
 from sickbeard.notifiers import xbmc
 from sickbeard.providers import newznab
@@ -1781,7 +1782,7 @@ class WebInterface:
         redirect("/home")
 
     @cherrypy.expose
-    def showPoster(self, show=None):
+    def showPoster(self, show=None, which=None):
 
         if show == None:
             return "Invalid show" #TODO: make it return a standard image
@@ -1791,31 +1792,36 @@ class WebInterface:
         if showObj == None:
             return "Unable to find show" #TODO: make it return a standard image
 
-        for cur_provider in sickbeard.metadata_provider_dict.values():
-            if ek.ek(os.path.isfile, cur_provider.get_poster_path(showObj)):
-                posterFilename = os.path.abspath(cur_provider.get_poster_path(showObj))
-                break
+        cache_obj = image_cache.ImageCache()
+        
+        if which == 'poster':
+            image_file_name = cache_obj.poster_path(showObj.tvdbid)
+        # this is for 'banner' but also the default case
+        else:
+            image_file_name = cache_obj.banner_path(showObj.tvdbid)
 
-        if ek.ek(os.path.isfile, posterFilename):
+        if ek.ek(os.path.isfile, image_file_name):
             try:
                 from PIL import Image
                 from cStringIO import StringIO
             except ImportError: # PIL isn't installed
-                return cherrypy.lib.static.serve_file(posterFilename, content_type="image/jpeg")
+                return cherrypy.lib.static.serve_file(image_file_name, content_type="image/jpeg")
             else:
-                im = Image.open(posterFilename)
+                im = Image.open(image_file_name)
                 if im.mode == 'P': # Convert GIFs to RGB
                     im = im.convert('RGB')
-                if sickbeard.USE_BANNER:
-                  size = 600, 112
+                if which == 'banner':
+                    size = 600, 112
+                elif which == 'poster':
+                    size = 136, 200
                 else:
-                  size = 136, 200
+                    return cherrypy.lib.static.serve_file(image_file_name, content_type="image/jpeg")
                 im.thumbnail(size, Image.ANTIALIAS)
                 buffer = StringIO()
                 im.save(buffer, 'JPEG')
                 return buffer.getvalue()
         else:
-            logger.log(u"No poster for show "+show.name, logger.WARNING) #TODO: make it return a standard image
+            logger.log(u"No image available for show "+show.name, logger.WARNING) #TODO: make it return a standard image
 
     @cherrypy.expose
     def toggleBanners(self):