diff -ub --recursive dillo-0.8.5-i18n-misc-20051127.dist/dpi/Makefile.am dillo-0.8.5-i18n-misc-20051127/dpi/Makefile.am
--- dillo-0.8.5-i18n-misc-20051127.dist/dpi/Makefile.am 2005-12-05 06:34:36.000000000 -0800
+++ dillo-0.8.5-i18n-misc-20051127/dpi/Makefile.am 2005-12-05 05:54:32.000000000 -0800
@@ -5,12 +5,14 @@
 downloadsdir = $(libdir)/dillo/dpi/downloads
 ftpdir = $(libdir)/dillo/dpi/ftp
 httpsdir = $(libdir)/dillo/dpi/https
+datauridir = $(libdir)/dillo/dpi/datauri
 hellodir = $(libdir)/dillo/dpi/hello
 filedir = $(libdir)/dillo/dpi/file
 
 bookmarks_PROGRAMS = bookmarks.dpi
 downloads_PROGRAMS = downloads.dpi
 ftp_PROGRAMS = ftp.filter.dpi
 https_PROGRAMS = https.filter.dpi
+datauri_PROGRAMS = datauri.filter.dpi
 hello_PROGRAMS = hello.filter.dpi
 file_PROGRAMS = file.dpi
@@ -27,5 +29,6 @@
 downloads_dpi_SOURCES = downloads.c dpiutil.c dpiutil.h
 ftp_filter_dpi_SOURCES = ftp.c dpiutil.c dpiutil.h
 https_filter_dpi_SOURCES = https.c dpiutil.c dpiutil.h
+datauri_filter_dpi_SOURCES =
 hello_filter_dpi_SOURCES = hello.c dpiutil.c dpiutil.h
 file_dpi_SOURCES = file.c dpiutil.c dpiutil.h
diff -ub --recursive dillo-0.8.5-i18n-misc-20051127.dist/dpi/Makefile.in dillo-0.8.5-i18n-misc-20051127/dpi/Makefile.in
diff -ub --recursive dillo-0.8.5-i18n-misc-20051127.dist/dpi/datauri.filter.dpi dillo-0.8.5-i18n-misc-20051127/dpi/datauri.filter.dpi
--- dillo-0.8.5-i18n-misc-20051127.dist/dpi/datauri.filter.dpi 2005-12-05 06:35:15.000000000 -0800
+++ dillo-0.8.5-i18n-misc-20051127/dpi/datauri.filter.dpi 2005-12-05 06:17:26.000000000 -0800
@@ -0,0 +1,156 @@
+#!/usr/bin/env python
+
+import urllib
+import sys
+import cgi
+try: import cStringIO as _StringIO
+except: import StringIO as _StringIO
+import signal
+
+DPIPREFIX='dpi:/datauri/'
+DPIPREFIX2='dpi:/data/'
+URIPREFIX='data:'
+B64SUFFIX=';base64'
+
+# If it takes longer than this to read a byte, give up -- dpid
+# probably stopped feeding us when it reached its internal buffer
+# length limit.
+
+READ_TIMEOUT = 10
+
+# Reset the watchdog after each PROGRESS_BYTES bytes received.
+
+PROGRESS_BYTES = 4096
+
+EXCEPTION_PAGE = u'''<html>
+<head>
+<title>Operation Failed</title>
+</head>
+<body>
+<h1>Operation Failed</h1>
+<p>%s</p>
+<p>Sorry, but I am unable to process this request.</p>
+<p>The data: URI you are using may be too large.</p>
+</body>
+</html>
+'''
+
+def dpiunquote(s):
+    '''
+    unquote a dpi parameter value
+    '''
+    if len(s) > 1 and s[0] in ('"', "'") and s[-1] == s[0]: s = s[1:][:-1]
+    s = '"'.join(s.split('""'))
+    s = "'".join(s.split("''"))
+    return s
+
+def dpiparse(s):
+    '''
+    parse a dpi tag into a dictionary of parameters and values
+    '''
+    if s[:len('<')] != '<' or s[-len('>'):] != '>':
+        raise Exception('missing >')
+    s = s[len('<'):-len('>')]
+    return dict([ pair for pair in [ [ dpiunquote(x) for x in kv.split('=', 1) ] for kv in s.split(' ') ] if len(pair) == 2 ])
+
+def dpiread(stream, progress_callback = None):
+    '''
+    read and return an unparsed dpi tag
+    '''
+    s = _StringIO.StringIO()
+    quoted = ''
+    x = 0
+    while True:
+        d = stream.read(1)
+        x += len(d)
+        if len(d) and progress_callback is not None and not x % PROGRESS_BYTES:
+            progress_callback(x)
+            pass
+        # Track quoting so that a '>' inside a quoted value does not end
+        # the tag early; quoted spaces become %20 so dpiparse can split
+        # the tag on spaces.
+        if quoted and d == quoted:
+            quoted = ''
+        elif not quoted and d in ('"', "'"):
+            quoted = d
+            pass
+        if quoted and d == ' ':
+            d = '%20'
+            pass
+        if not d:
+            raise EOFError("Missing expected dpi tag after %d bytes." % x)
+        else:
+            s.write(d)
+            pass
+        if d != '>' or quoted:
+            continue
+        break
+    s.seek(0)
+    return s.read()
+
+def main(instream, outstream):
+    dpidata = None
+    def gotalarm(signum, context):
+        if dpidata is None:
+            raise Exception("Timer Expired After %ss" % READ_TIMEOUT)
+        return
+    def progress(length):
+        signal.alarm(READ_TIMEOUT)
+        return
+    try:
+        oldhandler = signal.signal(signal.SIGALRM, gotalarm)
+        signal.alarm(READ_TIMEOUT)
+        dpidata = dpiread(instream, progress)
+        signal.alarm(0)
+        if oldhandler is not None:
+            signal.signal(signal.SIGALRM, oldhandler)
+            pass
+        dpi = dpiparse(dpidata)
+        if dpi['cmd'] != 'open_url': raise Exception('cmd is not open_url')
+        if dpi['url'][:len(DPIPREFIX)].lower() == DPIPREFIX.lower():
+            dpi['url'] = URIPREFIX + dpi['url'][len(DPIPREFIX):]
+            pass
+        elif dpi['url'][:len(DPIPREFIX2)].lower() == DPIPREFIX2.lower():
+            dpi['url'] = URIPREFIX + dpi['url'][len(DPIPREFIX2):]
+            pass
+        if dpi['url'][:len(URIPREFIX)].lower() != URIPREFIX.lower():
+            raise Exception('not a %s uri' % URIPREFIX)
+        pass
+    except Exception, e:
+        outstream.write("<cmd='start_send_page' url='%s' '>\nContent-Type: text/html; charset=utf-8\n\n%s" % (DPIPREFIX, (unicode(EXCEPTION_PAGE) % unicode(e)).encode('utf-8')))
+        outstream.flush()
+        outstream.close()
+        raise
+    dataurl = dpi['url'][len(URIPREFIX):]
+    pair = dataurl.split(',', 1)
+    if len(pair) < 2: pair.append('')
+    type, data = pair
+    type = urllib.unquote(type)
+    if type[-len(B64SUFFIX):].lower() == B64SUFFIX:
+        try:
+            data = urllib.unquote(data).decode('base64')
+            type = type[:-len(B64SUFFIX)]
+        except Exception, e:
+            # Treat malformed Base-64 as non-Base-64
+            data = urllib.unquote(data)
+    else:
+        data = urllib.unquote(data)
+        pass
+    # RFC 2397: an empty media type defaults to text/plain;charset=US-ASCII,
+    # and a bare parameter list implies text/plain.
+    if not type:
+        type = 'text/plain;charset=US-ASCII'
+        pass
+    if type[:1] == ';':
+        type = 'text/plain' + type
+        pass
+    # Reply to dillo: a start_send_page dpi tag, then an HTTP-style
+    # header and the decoded body.
+    outstream.write("<cmd='start_send_page' url='%s' '>\n" % "''".join(dpi['url'].split("'")))
+    outstream.write("Content-Type: " + type + "\n\n" + data)
+    outstream.flush()
+    outstream.close()
+
+if __name__ == '__main__':
+    main(sys.stdin, sys.stdout)
--- dillo-0.8.5-i18n-misc-20051127.dist/src/IO/Url.c 2005-11-06 05:53:37.000000000 -0800
+++ dillo-0.8.5-i18n-misc-20051127/src/IO/Url.c 2005-12-05 05:40:21.000000000 -0800
@@ -89,10 +89,11 @@
    {"dpi" , a_Dpi_ccc},
    {"ftp" , a_Dpi_ccc},
 #ifdef DILLO_SSL
-   {"https", a_Https_ccc}
+   {"https", a_Https_ccc},
 #else
-   {"https" , a_Dpi_ccc}
+   {"https" , a_Dpi_ccc},
 #endif
+   {"data" , a_Dpi_ccc}
 };
 
 #define LSIZE (sizeof(cccList) / sizeof(cccList[0]))
diff -ub --recursive dillo-0.8.5-i18n-misc-20051127.dist/src/capi.c dillo-0.8.5-i18n-misc-20051127/src/capi.c
--- dillo-0.8.5-i18n-misc-20051127.dist/src/capi.c 2005-11-06 06:46:41.000000000 -0800
+++ dillo-0.8.5-i18n-misc-20051127/src/capi.c 2005-12-05 05:56:34.000000000 -0800
@@ -260,6 +260,8 @@
 #endif
    } else if (g_strncasecmp(url_str, "file:", 5) == 0) {
       server = g_strdup("file");
+   } else if (g_strncasecmp(url_str, "data:", 5) == 0) {
+      server = g_strdup("datauri");
    }
 
    return ((*server_ptr = server) ? 1 : 0);
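
For reference, the data: URI handling in datauri.filter.dpi follows RFC 2397: everything before the first comma is the media type (optionally ending in ";base64"), everything after it is the payload, and an empty media type defaults to text/plain;charset=US-ASCII. Below is a minimal standalone sketch of that mapping, written in the same Python 2 dialect as the filter; split_data_uri is an illustrative name and is not part of the patch.

import urllib

def split_data_uri(uri):
    '''Illustrative only: mirror the type/payload split done by datauri.filter.dpi.'''
    if uri[:len('data:')].lower() != 'data:':
        raise ValueError('not a data: URI')
    # Media type (plus optional ;base64 flag) before the comma, payload after it.
    meta, _, payload = uri[len('data:'):].partition(',')
    meta = urllib.unquote(meta)
    if meta[-len(';base64'):].lower() == ';base64':
        meta = meta[:-len(';base64')]
        body = urllib.unquote(payload).decode('base64')
    else:
        body = urllib.unquote(payload)
    # RFC 2397 default when no media type is given.
    return (meta or 'text/plain;charset=US-ASCII'), body

print split_data_uri('data:text/plain;charset=utf-8,Hello%20world')
# -> ('text/plain;charset=utf-8', 'Hello world')
print split_data_uri('data:text/html;base64,PGgxPkhlbGxvPC9oMT4K')
# -> ('text/html', '<h1>Hello</h1>\n')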