#!/usr/bin/python3
# solution_222d.py
# ====================================================================
# convert infix expression to postfix expression
#
# This code is not perfect: some errors in badly formed
# numerical expressions (e.g. 1///3) are not detected.
# --------------------------------------------------------------------
# see the algorithm described at
# www.web4college.com/converters/infix-to-postfix-prefix.php
# Infix to Postfix/Prefix converter
# --------------------------------------------------------------------
# Format: precedence description  (lower number = higher precedence)
#   0  number
#   1  sub-start
#   2  sub-end
#   3  power
#   4  floor
#   4  multiply
#   4  divide
#   4  remainder
#   5  add
#   5  subtract
# ====================================================================
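
# --------------------------------------------------------------------
# ---- example (assuming the tokenizer returns (text,precedence)
# ---- pairs): the expression "1 + 2 * 3" tokenizes to
# ----
# ----     ('1',0) ('+',5) ('2',0) ('*',4) ('3',0)
# ----
# ---- and converts to the RPN token sequence  1 2 3 * +
# --------------------------------------------------------------------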

import sys
from infix_stack import Stack
from infix_queue import Queue
from infix_tokenizer import convert_to_tokens,display_token_list
import user_interface as ui

STEPS  = False
ERROR1 = 'Error: did not find sub-start "(" in operator stack'
ERROR2 = 'Error: did not find sub-end ")" in operator stack'
MSG1   = 'infix tokens exhausted - copy stack to RPN'

MATHOPS = ( '**', '//', '/', '*', '+', '-', '%' )

# --------------------------------------------------------------------
# ---- convert infix expression to RPN expression
# --------------------------------------------------------------------

def infix_to_rpn(infix_tokens:list) -> tuple:
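    """Convert a list of infix tokens to an RPN (postfix) token list.

    Each token is assumed to be a (text, precedence) pair as produced
    by convert_to_tokens() (only indices 0 and 1 are used here).
    Returns (ok, rpn_tokens); ok is False when the expression's
    parentheses are unbalanced.
    """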

    stack = Stack()

    # ----------------------------------------------------------------
    # ---- display the result of processing step
    # ----------------------------------------------------------------

    def _step_results(token,stack,rpn_tokens):

        if not STEPS: return

        print('-----------------------------------------------------')
        if token is not None:
            print(f'processing token {token[0]}')
        string = ' '.join([x[0] for x in stack.as_list()])
        print(f'stack     : {string}')
        string = ' '.join([x[0] for x in rpn_tokens])
        print(f'RPN tokens: {string}')
        return

    # ------------------------------------------------------
    # ---- process sub-end token ')': pop operators to the RPN
    # ---- list until the matching sub-start '(' is found; return
    # ---- False if the stack empties first (unbalanced parentheses)
    # ------------------------------------------------------

    def _process_sub_end(stack,rpn_tokens):

        while not stack.is_empty():
            tos = stack.pop()
            # ---- TOS is '('
            if tos[1] == 1: return True
            # ---- add TOS to the RPN token list
            rpn_tokens.append(tos)
        return False

    # ------------------------------------------------------
    # ---- process infix expression tokens
    # ------------------------------------------------------

    rpn_tokens = []            # RPN token list

    for token in infix_tokens:

        # ---- token is a number

        if token[1] == 0:
            rpn_tokens.append(token)
            _step_results(token,stack,rpn_tokens)
            continue

        # ---- token is sub-start '('

        if token[1] == 1:
            stack.push(token)
            _step_results(token,stack,rpn_tokens)
            continue

        # ---- token is sub-end ')'

        if token[1] == 2:
            tf = _process_sub_end(stack,rpn_tokens)
            if not tf:
                return (False,rpn_tokens)
            _step_results(token,stack,rpn_tokens)
            continue

        # ---- if the stack is empty

        if stack.is_empty():
            stack.push(token)
            _step_results(token,stack,rpn_tokens)
            continue

        # ---- token is a math operator whose precedence is
        # ---- higher than TOS (i.e. a lower precedence number)

        if token[0] in MATHOPS and \
           token[1] < stack.tos()[1]:
            stack.push(token)
            _step_results(token,stack,rpn_tokens)
            continue

        # ---- token precedence is less than or equal to TOS:
        # ---- pop equal- or higher-precedence operators to the
        # ---- RPN list, then push the token

        while True:
            if stack.is_empty(): break
            if stack.tos()[1] in [1,2]: break  # '(' or ')'
            if token[1] >= stack.tos()[1]:
                tos = stack.pop()
                rpn_tokens.append(tos)
            else:
                break
        stack.push(token)
        _step_results(token,stack,rpn_tokens)
        continue

        # ---- oops! (defensive only - unreachable, since every
        # ---- branch above ends with continue)

        print()
        print(f'({token}) OOPS! we should never have gotten here.')
        sys.exit()

    # ---- move remaining stack to the RPN token list

    if STEPS:
        print()
        print(MSG1)
        print()

    while not stack.is_empty():
        # --- sub-start?
        if stack.tos()[1] == 1: return (False,rpn_tokens)
        # --- sub-end?
        if stack.tos()[1] == 2: return (False,rpn_tokens)
        # ---- move TOS to RPN token list
        tos = stack.pop()
        rpn_tokens.append(tos)

    _step_results(None,stack,rpn_tokens)
    return (True,rpn_tokens)
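
# --------------------------------------------------------------------
# ---- worked trace (a sketch of how the loop above behaves for the
# ---- infix expression "( 1 + 2 ) * 3", token texts only):
# ----
# ----   token   operator stack   RPN tokens
# ----   (       (
# ----   1       (                1
# ----   +       ( +              1
# ----   2       ( +              1 2
# ----   )                        1 2 +
# ----   *       *                1 2 +
# ----   3       *                1 2 + 3
# ----   (end)                    1 2 + 3 *
# --------------------------------------------------------------------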

# --------------------------------------------------------------------
# ---- main
# --------------------------------------------------------------------

if __name__ == '__main__':

    INFIXTOKENS = False

    # ---- process user's infix expressions

    startup_message = '''\

+----------------------------------------------------+
| Convert INFIX Expression to RPN/POSTFIX            |
|                                                    |
| only enter numbers and operators                   |
|                                                    |
| Special menu items are                             |
|   show steps :  step, steps                        |
|   show tokens:  infix, tokens                      |
|   show both  :  debug, verbose                     |
|                                                    |
| Does some but not extensive syntax checking        |
+----------------------------------------------------+'''

    print(startup_message)

    while True:

        # ---- get infix expression

        print()
        ex = ui.get_user_input('Enter infix expression: ')

        if not ex: break

        # ---- toggle debug (display both steps and infix tokens)

        if ex == 'debug' or ex == 'verbose':
            STEPS = not STEPS
            INFIXTOKENS = not INFIXTOKENS
            continue

        # ---- toggle display intermediate steps

        if ex == 'steps' or ex == 'step':
            STEPS = not STEPS
            continue

        # ---- toggle display infix tokens

        if ex == 'infix' or ex == 'tokens':
            INFIXTOKENS = not INFIXTOKENS
            continue

        # ---- convert infix expression to tokens

        tf,infix_tokens = convert_to_tokens(ex)

        if not tf:
            print()
            print('syntax error - tokenizer failed')
            print(','.join([v[0] for v in infix_tokens]))
            continue

        # ---- display infix tokens returned by tokenizer

        if INFIXTOKENS:
            print()
            print('infix tokens found by tokenizer')
            print(infix_tokens)
            print()

        # ---- convert infix tokens to RPN

        tf,rpn_tokens = infix_to_rpn(infix_tokens)

        if not tf:
            print()
            print('infix to RPN conversion failed')
            continue

        # ---- display results

        print()

        print(f'expr : {ex}')
        string = ' '.join([x[0] for x in infix_tokens])
        print(f'infix: {string}')
        string = ' '.join([x[0] for x in rpn_tokens])
        print(f'rpn  : {string}')
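
# --------------------------------------------------------------------
# ---- example session (a sketch; exact spacing depends on what the
# ---- tokenizer returns):
# ----
# ----   Enter infix expression: 10 - 4 / 2
# ----
# ----   expr : 10 - 4 / 2
# ----   infix: 10 - 4 / 2
# ----   rpn  : 10 4 2 / -
# --------------------------------------------------------------------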