diff --git a/.github/workflows/workflow.yaml b/.github/workflows/workflow.yaml
index 1a91647..a1cc5ae 100644
--- a/.github/workflows/workflow.yaml
+++ b/.github/workflows/workflow.yaml
@@ -3,10 +3,10 @@ name: TypeShell Linux
 on:
   push:
     branches:
-      - main
+      - '*'
   pull_request:
     branches:
-      - main
+      - '*'
   release:
     types:
       - created
diff --git a/README.md b/README.md
index 0d683b9..77b10e8 100644
--- a/README.md
+++ b/README.md
@@ -287,3 +287,10 @@ print(s[0]) // Prints "Hello".
 print(s[1]) // Prints "".
 print(s[2]) // Prints "World".
 ```
+
+## Visual Studio Code
+There is no extension for VSCode yet. However, since the code is very Go-like, adding the ".tsh" extension to the settings should serve as a first workaround.
+- Open VSCode.
+- Go to File -> Preferences -> Settings.
+- Search for "file associations".
+- Add "*.tsh" to the list and associate it with Go.
diff --git a/converters/bash/converter.go b/converters/bash/converter.go
index ed01137..c388c71 100644
--- a/converters/bash/converter.go
+++ b/converters/bash/converter.go
@@ -34,8 +34,7 @@ func New() *converter {
 }
 
 func (c *converter) StringToString(value string) string {
-	// Replace "\\n" with "\n".
-	return strings.ReplaceAll(value, "\\n", "\n")
+	return value
 }
 
 func (c *converter) Dump() (string, error) {
@@ -81,14 +80,16 @@ func (c *converter) ProgramEnd() error {
 			"while [ ${_i} -lt ${_l} ]; do",
 			fmt.Sprintf("local _v=%s", c.sliceEvaluationString("${2}", "${_i}")),
 			c.sliceAssignmentString("${_n}", "${_i}", "${_v}", false),
-			"_i=$(expr ${_i} + 1)",
+			"_i=$((${_i}+1))",
 			"done",
 		)
 	}
 
	if c.stringSubscriptHelperRequired {
 		c.addHelper("substring", "_ssh",
-			"echo \"${1}\" | cut -c $(expr ${2} \\+ 1)-$(expr ${3} \\+ 1)", // Cut index starts at 1, therefore 1 must be added to 0-based subscript.
+			`_ls=$((${2}))`,
+			`_ll=$(((${3}-${2})+1))`,
+			`_ret="${1:${_ls}:${_ll}}"`,
 		)
 	}
 	return nil
@@ -99,17 +100,7 @@ func (c *converter) VarDefinition(name string, value string, global bool) error
 }
 
 func (c *converter) VarAssignment(name string, value string, global bool) error {
-	length := len(value)
-
-	if length > 0 {
-		if string(value[length-1]) != "\"" {
-			value = fmt.Sprintf("%s\"", value)
-		}
-		if string(value[0]) != "\"" {
-			value = fmt.Sprintf("\"%s", value)
-		}
-	}
-	c.addLine(fmt.Sprintf("%s=%s", c.varName(name, global), value))
+	c.addLine(c.varAssignmentString(name, value, global))
 	return nil
 }
 
@@ -127,7 +118,8 @@ func (c *converter) FuncStart(name string, params []string, returnTypes []parser
 	c.addLine(fmt.Sprintf("%s() {", name))
 
 	for i, param := range params {
-		c.VarAssignment(param, fmt.Sprintf("$%d", i+1), false)
+		s := c.varAssignmentString(param, fmt.Sprintf("$%d", i+1), false)
+		c.addLine(fmt.Sprintf("local %s", s))
 	}
 	return nil
 }
@@ -279,7 +271,7 @@ func (c *converter) BinaryOperation(left string, operator parser.BinaryOperator,
 		default:
 			return notAllowedError()
 		}
-		c.VarAssignment(helper, fmt.Sprintf("$(expr %s \\%s %s)", left, operator, right), false) // Backslash is required for * operator to prevent pattern expansion (https://www.shell-tips.com/bash/math-arithmetic-calculation/#using-the-expr-command-line).
+		c.VarAssignment(helper, fmt.Sprintf("$((%s%s%s))", left, operator, right), false) // Arithmetic expansion $(( )) needs no backslash-escaping of the * operator, unlike the former expr-based form.
case parser.DATA_TYPE_STRING: switch operator { case parser.BINARY_OPERATOR_ADDITION: @@ -419,7 +411,8 @@ func (c *converter) SliceLen(name string, valueUsed bool) (string, error) { func (c *converter) StringSubscript(value string, startIndex string, endIndex string, valueUsed bool) (string, error) { helper := c.nextHelperVar() - c.VarAssignment(helper, fmt.Sprintf("$(_ssh \"%s\" %s %s)", value, startIndex, endIndex), false) // https://www.baeldung.com/linux/bash-substring#1-using-thecut-command + c.addLine(fmt.Sprintf(`_ssh "%s" %s %s`, value, startIndex, endIndex)) + c.VarAssignment(helper, c.varEvaluationString("_ret", true), false) // https://www.baeldung.com/linux/bash-substring#1-using-thecut-command c.stringSubscriptHelperRequired = true return c.varEvaluationString(helper, false), nil @@ -559,6 +552,20 @@ func (c *converter) varName(name string, global bool) string { return name } +func (c *converter) varAssignmentString(name string, value string, global bool) string { + length := len(value) + + if length > 0 { + if string(value[length-1]) != `"` { + value = fmt.Sprintf(`%s"`, value) + } + if string(value[0]) != `"` { + value = fmt.Sprintf(`"%s`, value) + } + } + return fmt.Sprintf("%s=%s", c.varName(name, global), value) +} + func (c *converter) varEvaluationString(name string, global bool) string { return fmt.Sprintf("${%s}", c.varName(name, global)) } diff --git a/converters/batch/converter.go b/converters/batch/converter.go index 42cc021..93426cc 100644 --- a/converters/batch/converter.go +++ b/converters/batch/converter.go @@ -224,7 +224,7 @@ func (c *converter) ProgramEnd() error { c.addHelper("string length", stringLengthHelper, "set _l=0", ":_stlhl", - fmt.Sprintf(`if "!%s:~%%_l%%!" equ "" goto :_stlhle`, funcArgVar(0)), // https://www.geeksforgeeks.org/batch-script-string-length/ + fmt.Sprintf(`if "!%s!" equ "" (goto :_stlhle) else if "!%s:~%%_l%%!" 
equ "" goto :_stlhle`, funcArgVar(0), funcArgVar(0)), // https://www.geeksforgeeks.org/batch-script-string-length/ `set /A "_l=%_l%+1"`, "goto :_stlhl", ":_stlhle", diff --git a/examples/find_linux.tsh b/examples/find_linux.tsh new file mode 100644 index 0000000..a343287 --- /dev/null +++ b/examples/find_linux.tsh @@ -0,0 +1,10 @@ +import "strings" // Import standard library "strings". + +pattern := input("Find pattern: ") // Ask user for pattern input. +filesString, stderr, code := @ls("-1") | @grep(pattern) // Make program calls. +files := strings.Split(filesString, "\n") // Split files list at newline. + +// Iterate files. +for i, f := range files { + print(i, f) +} diff --git a/examples/find_windows.tsh b/examples/find_windows.tsh new file mode 100644 index 0000000..1d8fcf0 --- /dev/null +++ b/examples/find_windows.tsh @@ -0,0 +1,10 @@ +import "strings" // Import standard library "strings". + +pattern := input("Find pattern: ") // Ask user for pattern input. +filesString, stderr, code := @dir("/b") | @findstr(pattern) // Make program calls. +files := strings.Split(filesString, "\n") // Split files list at newline. + +// Iterate files. +for i, f := range files { + print(i, f) +} diff --git a/lexer/lexer.go b/lexer/lexer.go index eedf38b..330a093 100644 --- a/lexer/lexer.go +++ b/lexer/lexer.go @@ -248,7 +248,7 @@ func Tokenize(source string) ([]Token, error) { ogRow := row ogColumn := column - if c0 == "\"" { + if raw := c0 == "`"; raw || c0 == `"` { // Evaluate string. str := "" i++ @@ -257,7 +257,7 @@ func Tokenize(source string) ([]Token, error) { c0 = char(source, i) appended := false - if match := regexp.MustCompile(`^\\.`).FindString(source[i:]); match != "" { + if match := regexp.MustCompile(`^\\.`).FindString(source[i:]); !raw && match != "" { // Convert escaped character to be a control character (https://pkg.go.dev/strconv#Unquote). 
parsed, err := strconv.Unquote(fmt.Sprintf(`"%s"`, match)) @@ -267,7 +267,7 @@ func Tokenize(source string) ([]Token, error) { str += parsed i += len(match) appended = true - } else if c0 == "\"" { + } else if (raw && c0 == "`") || (!raw && c0 == `"`) { // Detected string end. i++ token = newToken(str, STRING_LITERAL, ogRow, ogColumn) @@ -354,7 +354,7 @@ func Tokenize(source string) ([]Token, error) { // If still no token has been found, exit with error. if token.tokenType == UNKNOWN { - err = fmt.Errorf("unknown token \"%s\" at position %d", c0, i) + err = fmt.Errorf(`unknown token "%s" at position %d`, c0, i) break } else if slices.Contains([]TokenType{SPACE, COMMENT}, token.tokenType) { // Ignore spaces and comments for now. diff --git a/parser/parser.go b/parser/parser.go index 645cd0a..5db172e 100644 --- a/parser/parser.go +++ b/parser/parser.go @@ -77,7 +77,7 @@ func (c context) buildPrefixedName(name string, prefix string, global bool, chec hash, exists := c.imports[prefix] if checkExistence && !exists { - return "", fmt.Errorf("prefix \"%s\" not found", prefix) + return "", fmt.Errorf(`prefix "%s" not found`, prefix) } name = buildPrefixedName(hash, name) } @@ -369,10 +369,10 @@ func (p Parser) findAllowed(searchTokenType lexer.TokenType, allowed ...lexer.To } if !slices.Contains(allowed, tokenType) { - return lexer.Token{}, fmt.Errorf("found illegal token \"%d\" before \"%d\"", tokenType, searchTokenType) + return lexer.Token{}, fmt.Errorf(`found illegal token "%d" before "%d"`, tokenType, searchTokenType) } } - return lexer.Token{}, fmt.Errorf("token type \"%d\" not found", searchTokenType) + return lexer.Token{}, fmt.Errorf(`token type "%d" not found`, searchTokenType) } func (p Parser) findBefore(searchTokenType lexer.TokenType, before ...lexer.TokenType) (lexer.Token, error) { @@ -388,11 +388,11 @@ func (p Parser) findBefore(searchTokenType lexer.TokenType, before ...lexer.Toke for _, tokenTypeTemp := range before { if tokenTypeTemp == tokenType { - 
return lexer.Token{}, fmt.Errorf("found \"%d\" before \"%d\"", tokenTypeTemp, tokenType) + return lexer.Token{}, fmt.Errorf(`found "%d" before "%d"`, tokenTypeTemp, tokenType) } } } - return lexer.Token{}, fmt.Errorf("token type \"%d\" not found", searchTokenType) + return lexer.Token{}, fmt.Errorf(`token type "%d" not found`, searchTokenType) } func (p *Parser) eat() lexer.Token { @@ -504,7 +504,7 @@ func (p *Parser) evaluateValues(ctx context) (evaluatedValues, error) { funcName = call.Name() if returnValuesLength == 0 { - return evaluatedValues{}, p.expectedError(fmt.Sprintf("return value from function \"%s\"", funcName), exprToken) + return evaluatedValues{}, p.expectedError(fmt.Sprintf(`return value from function "%s"`, funcName), exprToken) } } // Check if other values follow. @@ -515,7 +515,7 @@ func (p *Parser) evaluateValues(ctx context) (evaluatedValues, error) { // If other values follow, function must only return one value. if returnValuesLength > 1 { - return evaluatedValues{}, p.expectedError(fmt.Sprintf("only one return value from function \"%s\"", funcName), exprToken) + return evaluatedValues{}, p.expectedError(fmt.Sprintf(`only one return value from function "%s"`, funcName), exprToken) } } return evaluatedValues{ @@ -533,7 +533,7 @@ func (p *Parser) evaluateBuiltInFunction(tokenType lexer.TokenType, keyword stri // Make sure after the print call comes a opening round bracket. 
if nextToken.Type() != lexer.OPENING_ROUND_BRACKET { - return nil, p.expectedError("\"(\"", nextToken) + return nil, p.expectedError(`"("`, nextToken) } expressions := []Expression{} nextToken = p.peek() @@ -555,7 +555,7 @@ func (p *Parser) evaluateBuiltInFunction(tokenType lexer.TokenType, keyword stri } else if nextTokenType == lexer.CLOSING_ROUND_BRACKET { break } else { - return nil, p.expectedError("\",\" or \")\"", nextToken) + return nil, p.expectedError(`"," or ")"`, nextToken) } } } @@ -574,7 +574,7 @@ func (p *Parser) evaluateBuiltInFunction(tokenType lexer.TokenType, keyword stri // Make sure print call is terminated with a closing round bracket. if nextToken.Type() != lexer.CLOSING_ROUND_BRACKET { - return nil, p.expectedError("\")\"", nextToken) + return nil, p.expectedError(`")"`, nextToken) } return stmtCallout(keywordToken, expressions) } @@ -673,7 +673,7 @@ func (p *Parser) evaluateImports(ctx context) ([]Statement, error) { } } else if aliasLen == 0 { // If it's not a standard library path, an alias must be provided. - return nil, fmt.Errorf("an alias must be provided for the local import \"%s\" in \"%s\"", path, p.path) + return nil, fmt.Errorf(`an alias must be provided for the local import "%s" in "%s"`, path, p.path) } importParser := New() importedProg, err := importParser.parse(absPath, true) @@ -683,7 +683,7 @@ func (p *Parser) evaluateImports(ctx context) ([]Statement, error) { } if _, exists := ctx.findImport(alias); exists { - return nil, fmt.Errorf("import alias \"%s\" already exists", alias) + return nil, fmt.Errorf(`import alias "%s" already exists`, alias) } err = ctx.addImport(alias, importParser.prefix) @@ -716,7 +716,7 @@ func (p *Parser) evaluateImports(ctx context) ([]Statement, error) { } else if slices.Contains([]lexer.TokenType{lexer.IDENTIFIER, lexer.STRING_LITERAL}, nextTokenType) { // Nothing to do, parse next import in the next cycle. 
} else { - return nil, p.expectedError("\")\"", nextToken) + return nil, p.expectedError(`")"`, nextToken) } } } @@ -926,7 +926,7 @@ func (p *Parser) evaluateValueType() (ValueType, error) { nextToken = p.eat() // Eat closing square bracket. if nextToken.Type() != lexer.CLOSING_SQUARE_BRACKET { - return evaluatedType, p.expectedError("\"]\"", nextToken) + return evaluatedType, p.expectedError(`"]"`, nextToken) } nextToken = p.peek() evaluatedType.isSlice = true @@ -1041,7 +1041,7 @@ func (p *Parser) evaluateVarDefinition(ctx context) (Statement, error) { // If the variable already exists, make sure it has the same type as the specified type. if exists && specifiedType.DataType() != DATA_TYPE_UNKNOWN && !specifiedType.Equals(variableValueType) { - return nil, p.atError(fmt.Sprintf("variable \"%s\" already exists but has type %s", name, variableValueType.String()), nextToken) + return nil, p.atError(fmt.Sprintf(`variable "%s" already exists but has type %s`, name, variableValueType.String()), nextToken) } storedName := name @@ -1187,7 +1187,7 @@ func (p *Parser) evaluateCompoundAssignment(ctx context) (Statement, error) { binaryOperator := string(assignOperator[0]) if !slices.Contains(allowedBinaryOperators(valueType), binaryOperator) { - return nil, p.expectedError(fmt.Sprintf("valid %s compound assign operator but got \"%s\"", valueType.String(), assignOperator), assignToken) + return nil, p.expectedError(fmt.Sprintf(`valid %s compound assign operator but got "%s"`, valueType.String(), assignOperator), assignToken) } return VariableAssignment{ variables: []Variable{definedVariable}, @@ -1211,7 +1211,7 @@ func (p *Parser) evaluateVarAssignment(ctx context) (Statement, error) { // Check assign token. 
if assignToken.Type() != lexer.ASSIGN_OPERATOR { - return nil, p.expectedError("\"=\"", assignToken) + return nil, p.expectedError(`"="`, assignToken) } valuesToken := p.peek() evaluatedVals, err := p.evaluateValues(ctx) @@ -1300,7 +1300,7 @@ func (p *Parser) evaluateParams(ctx context) ([]Variable, error) { nextTokenType := nextToken.Type() if nextTokenType != lexer.COMMA && nextTokenType != lexer.CLOSING_ROUND_BRACKET { - return params, p.expectedError("\",\" or \")\"", nextToken) + return params, p.expectedError(`"," or ")"`, nextToken) } else if nextTokenType == lexer.COMMA { p.eat() } @@ -1388,7 +1388,7 @@ func (p *Parser) evaluateFunctionDefinition(ctx context) (Statement, error) { if nextTokenType == lexer.CLOSING_ROUND_BRACKET { break } else if nextTokenType != lexer.COMMA { - return nil, p.expectedError("\",\" or \")\"", nextToken) + return nil, p.expectedError(`"," or ")"`, nextToken) } returnTypeToken = p.peek() } @@ -1420,16 +1420,16 @@ func (p *Parser) evaluateFunctionDefinition(ctx context) (Statement, error) { if last { // TODO: Add token position to errors to raise clearer error messages. 
if lastStatement == nil || lastStatement.StatementType() != STATEMENT_TYPE_RETURN { - errTemp = fmt.Errorf("function \"%s\" requires a return statement at the end of the block", name) + errTemp = fmt.Errorf(`function "%s" requires a return statement at the end of the block`, name) } else if returnStatement := lastStatement.(Return); len(returnStatement.Values()) != len(returnTypes) { - errTemp = fmt.Errorf("function \"%s\" requires %d return values but returns %d", name, len(returnTypes), len(returnStatement.Values())) + errTemp = fmt.Errorf(`function "%s" requires %d return values but returns %d`, name, len(returnTypes), len(returnStatement.Values())) } else { for i, returnValue := range returnStatement.Values() { returnType := returnTypes[i] returnValueType := returnValue.ValueType() if !returnValueType.Equals(returnType) { - errTemp = fmt.Errorf("function \"%s\" returns %s but expects %s", name, returnValueType.String(), returnType.String()) + errTemp = fmt.Errorf(`function "%s" returns %s but expects %s`, name, returnValueType.String(), returnType.String()) break } } @@ -1704,12 +1704,13 @@ func (p *Parser) evaluateFor(ctx context) (Statement, error) { var stmt Statement nextToken := p.peek() nextTokenType := nextToken.Type() + nextAfterNextTokenType := p.peekAt(1).Type() // Clone context to avoid modification of the original. ctx = ctx.clone() - // If next token is an identifier and the one after it a comma, parse a for-range statement. - if nextTokenType == lexer.IDENTIFIER && p.peekAt(1).Type() == lexer.COMMA { + // If next token is an identifier and the one after it a comma or a short-init operator and range keyword, parse a for-range statement. 
+ if nextTokenType == lexer.IDENTIFIER && (nextAfterNextTokenType == lexer.COMMA || (nextAfterNextTokenType == lexer.SHORT_INIT_OPERATOR && p.peekAt(2).Type() == lexer.RANGE)) { p.eat() err := p.checkNewVariableNameToken(nextToken, ctx) @@ -1717,22 +1718,28 @@ func (p *Parser) evaluateFor(ctx context) (Statement, error) { return nil, err } indexVarName := nextToken.Value() - nextToken = p.eat() + nextToken = p.peek() + valueVarName := "" - if nextToken.Type() != lexer.COMMA { - return nil, p.expectedError(`","`, nextToken) - } - nextToken = p.eat() - err = p.checkNewVariableNameToken(nextToken, ctx) + if nextToken.Type() == lexer.COMMA { + p.eat() + nextToken = p.eat() - if err != nil { - return nil, err + if nextToken.Type() != lexer.IDENTIFIER { + return nil, p.expectedError("identifier", nextToken) + } + err = p.checkNewVariableNameToken(nextToken, ctx) + + if err != nil { + return nil, err + } + valueVarName = nextToken.Value() } - valueVarName := nextToken.Value() nextToken = p.eat() + hasNamedVar := len(valueVarName) > 0 if nextToken.Type() != lexer.SHORT_INIT_OPERATOR { - return nil, p.expectedError(`":="`, nextToken) + return nil, p.expectedError(`":=" or ","`, nextToken) } nextToken = p.eat() @@ -1764,10 +1771,25 @@ func (p *Parser) evaluateFor(ctx context) (Statement, error) { return nil, p.expectedError("slice or string", nextToken) } iterableValueType.isSlice = false // Make sure the value var is not a slice. - valueVar := NewVariable(valueVarName, iterableValueType, false, false) + forRangeStatements := []Statement{} + + // Add count variable. + ctx.addVariables(p.prefix, false, indexVar) + + // If no value variable has been provided, there's no need to add it. + if hasNamedVar { + valueVar := NewVariable(valueVarName, iterableValueType, false, false) + + // Add value variable. + ctx.addVariables(p.prefix, false, valueVar) - // Add block variables. 
- ctx.addVariables(p.prefix, false, indexVar, valueVar) + forRangeStatements = []Statement{ + VariableAssignment{ + variables: []Variable{valueVar}, + values: []Expression{iterableEvaluation}, + }, + } + } init := VariableAssignment{ variables: []Variable{indexVar}, @@ -1779,12 +1801,6 @@ func (p *Parser) evaluateFor(ctx context) (Statement, error) { right: Len{iterableExpression}, } increment := incrementDecrementStatement(indexVar, true) - forRangeStatements := []Statement{ - VariableAssignment{ - variables: []Variable{valueVar}, - values: []Expression{iterableEvaluation}, - }, - } statements, err := p.evaluateBlock(nil, ctx, SCOPE_FOR) if err != nil { @@ -1844,7 +1860,7 @@ func (p *Parser) evaluateFor(ctx context) (Statement, error) { // Next token must be a semicolon. if nextToken.Type() != lexer.SEMICOLON { - return nil, p.expectedError("\";\"", nextToken) + return nil, p.expectedError(`";"`, nextToken) } nextToken = p.peek() conditionToken = nextToken @@ -1863,7 +1879,7 @@ func (p *Parser) evaluateFor(ctx context) (Statement, error) { // Next token must be a semicolon. if nextToken.Type() != lexer.SEMICOLON { - return nil, p.expectedError("\";\"", nextToken) + return nil, p.expectedError(`";"`, nextToken) } nextToken = p.peek() @@ -2031,7 +2047,7 @@ func (p *Parser) evaluateSingleExpression(ctx context) (Expression, error) { } default: - return nil, p.atError(fmt.Sprintf("unknown expression type %d \"%s\"", tokenType, value), token) + return nil, p.atError(fmt.Sprintf(`unknown expression type %d "%s"`, tokenType, value), token) } if err != nil { @@ -2049,9 +2065,12 @@ func (p *Parser) evaluateUnaryOperation(ctx context) (Expression, error) { nextToken := p.peek() negate := false - if nextToken.Value() == UNARY_OPERATOR_NEGATE { - negate = true - p.eat() + if nextToken.Type() == lexer.UNARY_OPERATOR { + // Use nested if for possible future unary operators. 
+ if nextToken.Value() == UNARY_OPERATOR_NEGATE { + negate = true + p.eat() + } } valueToken := p.peek() expr, err := p.evaluateSingleExpression(ctx) @@ -2196,7 +2215,7 @@ func (p *Parser) evaluateBinaryOperation(ctx context, allowedOperators []BinaryO allowedTypeOperators := allowedBinaryOperators(leftType) if !slices.Contains(allowedTypeOperators, operator) { - return nil, p.expectedError(fmt.Sprintf("valid %s operator but got \"%s\"", leftType.String(), operator), operatorToken) + return nil, p.expectedError(fmt.Sprintf(`valid %s operator but got "%s"`, leftType.String(), operator), operatorToken) } leftExpression = BinaryOperation{ left: leftExpression, @@ -2235,7 +2254,7 @@ func (p *Parser) evaluateComparison(ctx context) (Expression, error) { allowedOperators := allowedCompareOperators(leftType) if !slices.Contains(allowedOperators, operator) { - return nil, p.expectedError(fmt.Sprintf("valid %s operator but got \"%s\"", leftType.String(), operator), operatorToken) + return nil, p.expectedError(fmt.Sprintf(`valid %s operator but got "%s"`, leftType.String(), operator), operatorToken) } return NewComparison(leftExpression, operator, rightExpression), nil } @@ -2414,7 +2433,7 @@ func (p *Parser) evaluateAppCall(ctx context) (Call, error) { nextToken := p.eat() if nextToken.Type() != lexer.AT { - return nil, p.expectedError("\"@\"", nextToken) + return nil, p.expectedError(`"@"`, nextToken) } nextToken = p.eat() name := nextToken.Value() @@ -2458,7 +2477,7 @@ func (p *Parser) evaluateSliceInstantiation(ctx context) (Expression, error) { nextToken = p.eat() if nextToken.Type() != lexer.OPENING_CURLY_BRACKET { - return nil, p.expectedError("\"{\"", nextToken) + return nil, p.expectedError(`"{"`, nextToken) } nextToken = p.peek() values := []Expression{} @@ -2488,14 +2507,14 @@ func (p *Parser) evaluateSliceInstantiation(ctx context) (Expression, error) { } else if nextTokenType == lexer.CLOSING_CURLY_BRACKET { break } else { - return nil, p.expectedError("\",\" or 
\"}\"", nextToken) + return nil, p.expectedError(`"," or "}"`, nextToken) } } } nextToken = p.eat() if nextToken.Type() != lexer.CLOSING_CURLY_BRACKET { - return nil, p.expectedError("\"}\"", nextToken) + return nil, p.expectedError(`"}"`, nextToken) } return SliceInstantiation{ dataType: sliceValueType.DataType(), @@ -2530,7 +2549,7 @@ func (p *Parser) evaluateSubscript(ctx context) (Expression, error) { nextToken := p.eat() if nextToken.Type() != lexer.OPENING_SQUARE_BRACKET { - return nil, p.expectedError("\"[\"", nextToken) + return nil, p.expectedError(`"["`, nextToken) } nextToken = p.peek() startToken := nextToken @@ -2589,7 +2608,7 @@ func (p *Parser) evaluateSubscript(ctx context) (Expression, error) { nextToken = p.eat() if nextToken.Type() != lexer.CLOSING_SQUARE_BRACKET { - return nil, p.expectedError("\"]\"", nextToken) + return nil, p.expectedError(`"]"`, nextToken) } // End-index is not included. @@ -2639,7 +2658,7 @@ func (p *Parser) evaluateSliceAssignment(ctx context) (Statement, error) { nextToken := p.eat() if nextToken.Type() != lexer.OPENING_SQUARE_BRACKET { - return nil, p.expectedError("\"[\"", nextToken) + return nil, p.expectedError(`"["`, nextToken) } nextToken = p.peek() index, err := p.evaluateExpression(ctx) @@ -2655,12 +2674,12 @@ func (p *Parser) evaluateSliceAssignment(ctx context) (Statement, error) { nextToken = p.eat() if nextToken.Type() != lexer.CLOSING_SQUARE_BRACKET { - return nil, p.expectedError("\"]\"", nextToken) + return nil, p.expectedError(`"]"`, nextToken) } nextToken = p.eat() if nextToken.Type() != lexer.ASSIGN_OPERATOR { - return nil, p.expectedError("\"=\"", nameToken) + return nil, p.expectedError(`"="`, nameToken) } valueToken := p.peek() value, err := p.evaluateExpression(ctx) @@ -2707,7 +2726,7 @@ func (p *Parser) evaluateIncrementDecrement(ctx context) (Statement, error) { case lexer.DECREMENT_OPERATOR: increment = false default: - return nil, p.expectedError("\"++\" or \"--\"", operationToken) + return nil, 
p.expectedError(`"++" or "--"`, operationToken) } return incrementDecrementStatement(definedVariable, increment), nil } diff --git a/std/strings.tsh b/std/strings.tsh index 74e60ed..4796626 100644 --- a/std/strings.tsh +++ b/std/strings.tsh @@ -58,3 +58,46 @@ func HasSuffix(s string, suffix string) bool { } return false } + +func Split(s string, sep string) []string { + sLen := len(s) + sepLen := len(sep) + elems := []string{} + + // If s and sep are empty, slice will be empty. + if sLen > 0 || sepLen > 0 { + startI := 0 + endI := 0 + elIndex := 0 + boundary := len(s) + + if sLen > 0 { + boundary -= sepLen + } + + for endI <= boundary { + if s[endI:endI+sepLen] == sep { + sepEmpty := sepLen == 0 + + // If sep is empty, split after every char. + if sepEmpty { + endI++ + } + + if !sepEmpty || endI <= boundary { + elems[elIndex] = s[startI:endI] + endI += sepLen + startI = endI + } + elIndex++ + } else if endI == boundary { + // Add last element to slice. + elems[elIndex] = s[startI:] + break + } else { + endI++ + } + } + } + return elems +} diff --git a/tests/strings.go b/tests/strings.go index 9b14061..29bdcd1 100644 --- a/tests/strings.go +++ b/tests/strings.go @@ -1,6 +1,7 @@ package tests import ( + "strings" "testing" "github.com/stretchr/testify/require" @@ -85,6 +86,47 @@ func testStringRangeNoIndicesSubscriptSuccess(t *testing.T, transpilerFunc trans }) } +func testStringWithNewlineSuccess(t *testing.T, transpilerFunc transpilerFunc) { + transpilerFunc(t, ` + s := "hello\nworld" + + print(s) + `, func(output string, err error) { + require.Nil(t, err) + require.Equal(t, "hello\nworld", output) + }) +} + +func testStringWithoutNewlineSuccess(t *testing.T, transpilerFunc transpilerFunc) { + transpilerFunc(t, ` + s := `+"`hello\\nworld`"+` + + print(s) + `, func(output string, err error) { + require.Nil(t, err) + require.Equal(t, `hello\nworld`, output) + }) +} + +func testMultilineStringSuccess(t *testing.T, transpilerFunc transpilerFunc) { + transpilerFunc(t, ` 
+ s := `+"`"+` + hello + multiline + world + `+"`"+` + + print(s) + `, func(output string, err error) { + require.Nil(t, err) + require.Equal(t, strings.TrimSpace(` + hello + multiline + world + `), output) + }) +} + func testItoaSuccess(t *testing.T, transpilerFunc transpilerFunc) { transpilerFunc(t, ` print("Hello World " + itoa(24)) diff --git a/tests/strings_linux_test.go b/tests/strings_linux_test.go index ad83ca0..bf887eb 100644 --- a/tests/strings_linux_test.go +++ b/tests/strings_linux_test.go @@ -32,6 +32,18 @@ func TestStringRangeNoIndicesSubscriptSuccess(t *testing.T) { testStringRangeNoIndicesSubscriptSuccess(t, transpileBash) } +func TestStringWithNewlineSuccess(t *testing.T) { + testStringWithNewlineSuccess(t, transpileBash) +} + +func TestStringWithoutNewlineSuccess(t *testing.T) { + testStringWithoutNewlineSuccess(t, transpileBash) +} + +func TestMultilineStringSuccess(t *testing.T) { + testMultilineStringSuccess(t, transpileBash) +} + func TestItoaSuccess(t *testing.T) { testItoaSuccess(t, transpileBash) } diff --git a/tests/strings_windows_test.go b/tests/strings_windows_test.go index 98e83a8..933bb73 100644 --- a/tests/strings_windows_test.go +++ b/tests/strings_windows_test.go @@ -32,6 +32,18 @@ func TestStringRangeNoIndicesSubscriptSuccess(t *testing.T) { testStringRangeNoIndicesSubscriptSuccess(t, transpileBatch) } +func TestStringWithNewlineSuccess(t *testing.T) { + testStringWithNewlineSuccess(t, transpileBatch) +} + +func TestStringWithoutNewlineSuccess(t *testing.T) { + testStringWithoutNewlineSuccess(t, transpileBatch) +} + +func TestMultilineStringSuccess(t *testing.T) { + testMultilineStringSuccess(t, transpileBatch) +} + func TestItoaSuccess(t *testing.T) { testItoaSuccess(t, transpileBatch) }