Current File : /lib/python3/dist-packages/twisted/python/htmlizer.py

# -*- test-case-name: twisted.python.test.test_htmlizer -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
HTML rendering of Python source.
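
A minimal usage sketch (the filenames C{"example.py"} and
C{"example.html"} are placeholders)::

    from twisted.python.htmlizer import filter

    with open("example.py", "rb") as source:
        with open("example.html", "wb") as html:
            filter(source, html)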
"""

from twisted.python.compat import _tokenize, escape

import tokenize, keyword
from . import reflect
from twisted.python._oldstyle import _oldStyle


@_oldStyle
class TokenPrinter:
    """
    Format a stream of tokens and intermediate whitespace for pretty-printing.
    """

    currentCol, currentLine = 0, 1
    lastIdentifier = parameters = 0
    encoding = "utf-8"

    def __init__(self, writer):
        """
        @param writer: A file-like object, opened in bytes mode.
        """
        self.writer = writer


    def printtoken(self, type, token, sCoordinates, eCoordinates, line):
        """
        Write one token, preceded by the newlines and spaces needed to
        restore its original (row, column) position.
        """
        # On Python 3, tokenize emits an initial ENCODING token; record it
        # so later tokens can be encoded consistently.
        if hasattr(tokenize, "ENCODING") and type == tokenize.ENCODING:
            self.encoding = token
            return

        if not isinstance(token, bytes):
            token = token.encode(self.encoding)

        (srow, scol) = sCoordinates
        (erow, ecol) = eCoordinates
        if self.currentLine < srow:
            self.writer(b'\n' * (srow-self.currentLine))
            self.currentLine, self.currentCol = srow, 0
        self.writer(b' ' * (scol-self.currentCol))
        if self.lastIdentifier:
            type = "identifier"
            self.parameters = 1
        elif type == tokenize.NAME:
            # token was encoded to bytes above, but keyword.iskeyword()
            # compares against text; decode it for the check.
            if keyword.iskeyword(token.decode(self.encoding)):
                type = 'keyword'
            else:
                if self.parameters:
                    type = 'parameter'
                else:
                    type = 'variable'
        else:
            type = tokenize.tok_name.get(type).lower()
        self.writer(token, type)
        self.currentCol = ecol
        self.currentLine += token.count(b'\n')
        if self.currentLine != erow:
            self.currentCol = 0
        self.lastIdentifier = token in (b'def', b'class')
        if token == b':':
            self.parameters = 0


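# A minimal sketch of how TokenPrinter drives its writer (illustrative
# comment only, not executed): the writer is called with one argument for
# plain whitespace and with two arguments for classified tokens.
#
#     collected = []
#     def collect(data, type=None):
#         collected.append((data, type))
#
#     printer = TokenPrinter(collect)
#     printer.printtoken(tokenize.NAME, "def", (1, 0), (1, 3), "def f():")
#     printer.printtoken(tokenize.NAME, "f", (1, 4), (1, 5), "def f():")
#     # collected == [(b'', None), (b'def', 'keyword'),
#     #               (b' ', None), (b'f', 'identifier')]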

@_oldStyle
class HTMLWriter:
    """
    Write the stream of tokens and whitespace from L{TokenPrinter},
    formatting tokens as HTML spans.
    """

    noSpan = []

    def __init__(self, writer):
        self.writer = writer
        noSpan = []
        # Accumulate "noSpan" entries from this class and all its bases,
        # so subclasses extend the list rather than replace it.
        reflect.accumulateClassList(self.__class__, "noSpan", noSpan)
        self.noSpan = noSpan


    def write(self, token, type=None):
        if isinstance(token, bytes):
            token = token.decode("utf-8")
        token = escape(token)
        token = token.encode("utf-8")
        if (type is None) or (type in self.noSpan):
            self.writer(token)
        else:
            self.writer(
                b'<span class="py-src-' + type.encode("utf-8") + b'">' +
                token + b'</span>')


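# For example (a sketch; "out" stands for any bytes-mode file object),
# HTMLWriter(out.write).write(b"def", "keyword") escapes the token and
# emits:
#
#     <span class="py-src-keyword">def</span>
#
# whereas write(b"\n") passes the bytes through without a span.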

class SmallerHTMLWriter(HTMLWriter):
    """
    HTMLWriter that doesn't generate spans for some junk.

    Results in much smaller HTML output.
    """
    noSpan = ["endmarker", "indent", "dedent", "op", "newline", "nl"]


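# Because HTMLWriter.__init__ accumulates "noSpan" across the class
# hierarchy, a further subclass extends the list rather than replacing
# it.  A hypothetical example:
#
#     class TinyHTMLWriter(SmallerHTMLWriter):
#         noSpan = ["comment"]
#
#     # An instance's noSpan then holds SmallerHTMLWriter's entries
#     # plus "comment".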

def filter(inp, out, writer=HTMLWriter):
    out.write(b'<pre>')
    printer = TokenPrinter(writer(out.write).write).printtoken
    try:
        for token in _tokenize(inp.readline):
            (tokenType, string, start, end, line) = token
            printer(tokenType, string, start, end, line)
    except tokenize.TokenError:
        # Tolerate truncated or malformed input; whatever was tokenized
        # before the error has already been written.
        pass
    out.write(b'</pre>\n')


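# A minimal sketch of calling filter() directly with in-memory streams
# (io.BytesIO stands in for real bytes-mode files):
#
#     from io import BytesIO
#
#     source = BytesIO(b"x = 1\n")
#     html = BytesIO()
#     filter(source, html)
#     # html.getvalue() starts with b'<pre>' and wraps the name in
#     # b'<span class="py-src-variable">x</span>'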

def main():
    import sys
    # On Python 3, write to stdout's binary buffer; Python 2's stdout
    # already accepts bytes.
    stdout = getattr(sys.stdout, "buffer", sys.stdout)
    with open(sys.argv[1], "rb") as f:
        filter(f, stdout)

if __name__ == '__main__':
    main()
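
# Command-line usage, writing the rendered HTML to stdout ("example.py"
# is a placeholder path):
#
#     python -m twisted.python.htmlizer example.py > example.html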