Projet

Général

Profil

Télécharger (3,09 ko) Statistiques
| Branche: | Révision:

root / larpe / trunk / filter / larpe-filter.py @ ebf37dd5

1
from mod_python import apache
2
import os
3
import re
4

    
5
# Base directory where Larpe keeps its per-site data; relative paths are
# anchored here (see get_abs_path below).
app_dir = '/var/lib/larpe'
6

    
7
def outputfilter(filter):
    """mod_python output filter that buffers whole HTML responses.

    Responses with a known, non-HTML content type are passed through
    untouched.  HTML responses are accumulated across filter invocations
    in ``filter.req.temp_doc`` and written out in one piece once the end
    of the stream (``filter.read()`` returning None) is reached.
    """
    content_type = filter.req.content_type
    # Only filter html code.  The original code bound `is_html` inside a
    # conditional and relied on short-circuit evaluation to avoid an
    # unbound-name error; an early pass-through guard is equivalent and safe.
    if content_type is not None and re.search('text/html', content_type) is None:
        filter.pass_on()
        return

    if not hasattr(filter.req, 'temp_doc'):  # first invocation for this request
        filter.req.temp_doc = []  # new attribute holds the buffered document
        # The buffered body may not match the original length; a stale
        # Content-Length made Gecko browsers truncate data, so drop it.
        if 'Content-Length' in filter.req.headers_out:
            del filter.req.headers_out['Content-Length']

    temp_doc = filter.req.temp_doc
    # read() may return '' at any point, but only returns None at the end.
    s = filter.read()
    while s:
        temp_doc.append(s)
        s = filter.read()

    if s is None:  # end of stream: flush the whole buffered document
        filter.write(''.join(temp_doc))
        filter.close()
36

    
37
def get_abs_path(s):
    """Return *s* unchanged when it is empty/None or already absolute;
    otherwise anchor it under the application directory (`app_dir`)."""
    if not s or s.startswith('/'):
        return s
    return os.path.join(app_dir, s)
43
    
44
def get_proxied_site_path(filter):
    """Absolute path of the per-host, per-site service-provider directory."""
    host = filter.req.hostname
    site = get_proxied_site_name(filter)
    return get_abs_path(os.path.join('sp', host, site))
48

    
49
def get_proxied_site_name(filter):
    """Extract the proxied site name from the request URI.

    The first path segment names the site, except when it is 'liberty',
    in which case the name is carried by the second segment.
    """
    segments = filter.req.uri.split('/')
    return segments[2] if segments[1] == 'liberty' else segments[1]
54
    
55
def delete_cookies(filter):
    """Ask the browser to expire cookies listed for the proxied site.

    Reads the site's 'cookies_to_delete' file; if one of the listed
    cookies appears in the request's Cookie header, a Set-Cookie header
    with an already-past expiry is emitted for it.  Only the first
    matching cookie is handled per call.
    """
    cookies_file_name = get_abs_path(os.path.join(get_proxied_site_path(filter), 'cookies_to_delete'))
    cookies_file = open(cookies_file_name, 'r')
    try:
        # Whitespace-separated cookie entries, one per token.
        cookies = cookies_file.read().split()
    finally:
        # Always release the handle, even if the read fails (the original
        # leaked it on exception).
        cookies_file.close()

    # The Cookie header does not change inside the loop; check it once
    # ('in' replaces the deprecated has_key()).
    if 'Cookie' not in filter.req.headers_in:
        return
    cookies_header = filter.req.headers_in['Cookie']

    for cookie in cookies:
        # The entry's last character is stripped before matching.
        # NOTE(review): the entry is used as a regex pattern, so any regex
        # metacharacters in a stored cookie name will be interpreted —
        # confirm the file format or escape with re.escape().
        if re.findall(cookie[:len(cookie) - 1], cookies_header):
            # Tell the client to drop the cookie immediately.
            filter.req.headers_out['Set-Cookie'] = '%s="deleted"; max-age=0; expires=Thu, 01-Jan-1970 00:00:00 GMT; path=/' % cookie.split('=')[0]
            break
    (1-1/1)