Source file src/cmd/asm/internal/lex/lex.go

// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package lex implements lexical analysis for the assembler.
package lex

import (
	"fmt"
	"log"
	"os"
	"strings"
	"text/scanner"

	"cmd/internal/src"
)

// A ScanToken represents an input item. It is a simple wrapping of rune, as
// returned by text/scanner.Scanner, plus a couple of extra values.
type ScanToken rune

const (
	// Asm defines some two-character lexemes. We make up
	// a rune/ScanToken value for them - ugly but simple.
	LSH          ScanToken = -1000 - iota // << Left shift.
	RSH                                   // >> Logical right shift.
	ARR                                   // -> Used on ARM for shift type 3, arithmetic right shift.
	ROT                                   // @> Used on ARM for shift type 4, rotate right.
	Include                               // included file started here
	BuildComment                          // //go:build or +build comment
	macroName                             // name of macro that should not be expanded
)

// IsRegisterShift reports whether the token is one of the ARM register shift operators.
func IsRegisterShift(r ScanToken) bool {
	return ROT <= r && r <= LSH // Order looks backwards because these are negative.
}

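// exampleIsRegisterShift is an illustrative sketch, not part of the original
// file. The shift lexemes occupy consecutive negative runes (LSH == -1000,
// RSH == -1001, ARR == -1002, ROT == -1003), so the backwards-looking range
// check above covers exactly those four tokens.
func exampleIsRegisterShift() {
	fmt.Println(IsRegisterShift(LSH))     // true: -1000, top of the range
	fmt.Println(IsRegisterShift(ROT))     // true: -1003, bottom of the range
	fmt.Println(IsRegisterShift(Include)) // false: -1004 lies below ROT
}
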
func (t ScanToken) String() string {
	switch t {
	case scanner.EOF:
		return "EOF"
	case scanner.Ident:
		return "identifier"
	case scanner.Int:
		return "integer constant"
	case scanner.Float:
		return "float constant"
	case scanner.Char:
		return "rune constant"
	case scanner.String:
		return "string constant"
	case scanner.RawString:
		return "raw string constant"
	case scanner.Comment:
		return "comment"
	default:
		return fmt.Sprintf("%q", rune(t))
	}
}

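// exampleTokenStrings is an illustrative sketch, not part of the original
// file. Named scanner categories print as descriptive words, while ordinary
// single-rune tokens fall through to the quoted default case.
func exampleTokenStrings() {
	fmt.Println(ScanToken(scanner.Ident)) // identifier
	fmt.Println(ScanToken(scanner.Int))   // integer constant
	fmt.Println(ScanToken('+'))           // '+'
}
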
// NewLexer returns a lexer for the named file.
func NewLexer(name string) TokenReader {
	input := NewInput(name)
	fd, err := os.Open(name)
	if err != nil {
		log.Fatalf("%s\n", err)
	}
	input.Push(NewTokenizer(name, fd, fd))
	return input
}

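// exampleNewLexer is an illustrative sketch, not part of the original file;
// the file name is hypothetical. It shows the intended call pattern: build a
// lexer, drain it with Next until EOF, then release the underlying file.
// Note that NewLexer exits the process via log.Fatalf if the file cannot be
// opened.
func exampleNewLexer() {
	lexer := NewLexer("prog.s") // hypothetical assembly source file
	for tok := lexer.Next(); tok != scanner.EOF; tok = lexer.Next() {
		fmt.Printf("%s:%d: %s %q\n", lexer.File(), lexer.Line(), tok, lexer.Text())
	}
	lexer.Close()
}
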
// The other files in this directory each contain an implementation of TokenReader.

// A TokenReader is like a reader, but returns lex tokens of type ScanToken. It can also tell you what
// the text of the most recently returned token is, and where it was found.
// The underlying scanner elides all spaces except newline, so the input looks like a stream of
// Tokens; original spacing is lost but we don't need it.
type TokenReader interface {
	// Next returns the next token.
	Next() ScanToken
	// The following methods all refer to the most recent token returned by Next.
	// Text returns the original string representation of the token.
	Text() string
	// File reports the source file name of the token.
	File() string
	// Base reports the position base of the token.
	Base() *src.PosBase
	// SetBase sets the position base.
	SetBase(*src.PosBase)
	// Line reports the source line number of the token.
	Line() int
	// Col reports the source column number of the token.
	Col() int
	// Close does any teardown required.
	Close()
}

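// exampleCountTokens is an illustrative sketch, not part of the original
// file. Because TokenReader abstracts over the concrete implementations in
// this directory, a consumer like this works unchanged on any of them.
func exampleCountTokens(r TokenReader) int {
	n := 0
	for r.Next() != scanner.EOF {
		n++
	}
	return n
}
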
// A Token is a scan token plus its string value.
// A macro is stored as a sequence of Tokens with spaces stripped.
type Token struct {
	ScanToken
	text string
}

// Make returns a Token with the given rune (ScanToken) and text representation.
func Make(token ScanToken, text string) Token {
	// If the symbol starts with center dot, as in ·x, rewrite it as ""·x
	if token == scanner.Ident && strings.HasPrefix(text, "\u00B7") {
		text = `""` + text
	}
	// Substitute the substitutes for . and /.
	text = strings.ReplaceAll(text, "\u00B7", ".")
	text = strings.ReplaceAll(text, "\u2215", "/")
	return Token{ScanToken: token, text: text}
}

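// exampleMake is an illustrative sketch, not part of the original file,
// showing the rewriting above: a leading center dot gains the empty-package
// prefix, and the Unicode substitutes become . and /.
func exampleMake() {
	fmt.Println(Make(scanner.Ident, "\u00B7Name"))      // "".Name
	fmt.Println(Make(scanner.Ident, "a\u2215b\u00B7c")) // a/b.c
}
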
func (l Token) String() string {
	return l.text
}

// A Macro represents the definition of a #defined macro.
type Macro struct {
	name   string   // The #define name.
	args   []string // Formal arguments.
	tokens []Token  // Body of macro.
}

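// exampleMacro is an illustrative sketch, not part of the original file: a
// plausible stored form of `#define A(x) x+1`. Real Macro values are built
// by the preprocessor elsewhere in this package.
func exampleMacro() Macro {
	return Macro{
		name:   "A",
		args:   []string{"x"},
		tokens: Tokenize("x+1"),
	}
}
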
// Tokenize turns a string into a list of Tokens; used to parse the -D flag and in tests.
func Tokenize(str string) []Token {
	t := NewTokenizer("command line", strings.NewReader(str), nil)
	var tokens []Token
	for {
		tok := t.Next()
		if tok == scanner.EOF {
			break
		}
		tokens = append(tokens, Make(tok, t.Text()))
	}
	return tokens
}
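
// exampleTokenize is an illustrative sketch, not part of the original file,
// showing how a -D style definition such as A=1 breaks into tokens:
//
//	identifier "A"
//	'=' "="
//	integer constant "1"
func exampleTokenize() {
	for _, tok := range Tokenize("A=1") {
		fmt.Printf("%s %q\n", tok.ScanToken, tok.text)
	}
}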