Thank you for your comment

Beautiful Racket / tutorials

jsonic/tokenizer.rkt
#lang br/quicklang
(require brag/support)

;; Returns a zero-argument `next-token` thunk that reads the next token
;; from `port` using a `lexer` (from brag/support). The lexer's rules are
;; elided here (···) and filled in by the later versions of this file.
(define (make-tokenizer port)
  (define (next-token)
    (define jsonic-lexer
      (lexer
       ···))
    (jsonic-lexer port))
  next-token)
(provide make-tokenizer)
 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
#lang br/quicklang
(require brag/support)

;; Returns a zero-argument `next-token` thunk that reads the next token
;; from `port` using a `lexer` (from brag/support). The lexer's rules are
;; elided here (···) and filled in by the later versions of this file.
(define (make-tokenizer port)
  (define (next-token)
    (define jsonic-lexer
      (lexer
       ···))
    (jsonic-lexer port))
  next-token)
(provide make-tokenizer)
copy to clipboard
jsonic/tokenizer.rkt
#lang br/quicklang
(require brag/support)

;; Returns a zero-argument `next-token` thunk that reads the next token
;; from `port`. Remaining lexer rules are elided (···) at this stage.
(define (make-tokenizer port)
  (define (next-token)
    (define jsonic-lexer
      (lexer
       ;; Line comment: match "//" through the newline, emit no token,
       ;; and recurse so the caller gets the token after the comment.
       [(from/to "//" "\n") (next-token)]
       ···))
    (jsonic-lexer port))
  next-token)
(provide make-tokenizer)
 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
#lang br/quicklang
(require brag/support)

;; Returns a zero-argument `next-token` thunk that reads the next token
;; from `port`. Remaining lexer rules are elided (···) at this stage.
(define (make-tokenizer port)
  (define (next-token)
    (define jsonic-lexer
      (lexer
       ;; Line comment: match "//" through the newline, emit no token,
       ;; and recurse so the caller gets the token after the comment.
       [(from/to "//" "\n") (next-token)]
       ···))
    (jsonic-lexer port))
  next-token)
(provide make-tokenizer)
copy to clipboard
jsonic/tokenizer.rkt
#lang br/quicklang
(require brag/support)

;; Returns a zero-argument `next-token` thunk that reads the next token
;; from `port`. Remaining lexer rules are elided (···) at this stage.
(define (make-tokenizer port)
  (define (next-token)
    (define jsonic-lexer
      (lexer
       ;; Line comment: match "//" through the newline, emit no token,
       ;; and recurse so the caller gets the token after the comment.
       [(from/to "//" "\n") (next-token)]
       ;; Embedded s-expression: "@$" ... "$@" becomes a SEXP-TOK whose
       ;; value is the matched text with the delimiters trimmed off.
       [(from/to "@$" "$@")
        (token 'SEXP-TOK (trim-ends "@$" lexeme "$@"))]
       ···))
    (jsonic-lexer port))
  next-token)
(provide make-tokenizer)
 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
#lang br/quicklang
(require brag/support)

;; Returns a zero-argument `next-token` thunk that reads the next token
;; from `port`. Remaining lexer rules are elided (···) at this stage.
(define (make-tokenizer port)
  (define (next-token)
    (define jsonic-lexer
      (lexer
       ;; Line comment: match "//" through the newline, emit no token,
       ;; and recurse so the caller gets the token after the comment.
       [(from/to "//" "\n") (next-token)]
       ;; Embedded s-expression: "@$" ... "$@" becomes a SEXP-TOK whose
       ;; value is the matched text with the delimiters trimmed off.
       [(from/to "@$" "$@")
        (token 'SEXP-TOK (trim-ends "@$" lexeme "$@"))]
       ···))
    (jsonic-lexer port))
  next-token)
(provide make-tokenizer)
copy to clipboard
jsonic/tokenizer.rkt
#lang br/quicklang
(require brag/support)

;; Returns a zero-argument `next-token` thunk that reads successive
;; tokens from `port`.
;;
;; Fix: `jsonic-lexer` was previously re-created inside `next-token`,
;; so the lexer closure was rebuilt on every token read. It is now
;; defined once per tokenizer. `jsonic-lexer` and `next-token` are
;; mutually recursive through the comment rule; internal defines are
;; letrec-scoped, so the forward reference is fine.
(define (make-tokenizer port)
  (define jsonic-lexer
    (lexer
     ;; Line comment: match "//" through the newline, emit no token,
     ;; and recurse so the caller gets the token after the comment.
     [(from/to "//" "\n") (next-token)]
     ;; Embedded s-expression: "@$" ... "$@" becomes a SEXP-TOK whose
     ;; value is the matched text with the delimiters trimmed off.
     [(from/to "@$" "$@")
      (token 'SEXP-TOK (trim-ends "@$" lexeme "$@"))]
     ;; Everything else passes through one character at a time.
     [any-char (token 'CHAR-TOK lexeme)]))
  (define (next-token)
    (jsonic-lexer port))
  next-token)
(provide make-tokenizer)
 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
#lang br/quicklang
(require brag/support)

;; Returns a zero-argument `next-token` thunk that reads successive
;; tokens from `port`.
;;
;; Fix: `jsonic-lexer` was previously re-created inside `next-token`,
;; so the lexer closure was rebuilt on every token read. It is now
;; defined once per tokenizer. `jsonic-lexer` and `next-token` are
;; mutually recursive through the comment rule; internal defines are
;; letrec-scoped, so the forward reference is fine.
(define (make-tokenizer port)
  (define jsonic-lexer
    (lexer
     ;; Line comment: match "//" through the newline, emit no token,
     ;; and recurse so the caller gets the token after the comment.
     [(from/to "//" "\n") (next-token)]
     ;; Embedded s-expression: "@$" ... "$@" becomes a SEXP-TOK whose
     ;; value is the matched text with the delimiters trimmed off.
     [(from/to "@$" "$@")
      (token 'SEXP-TOK (trim-ends "@$" lexeme "$@"))]
     ;; Everything else passes through one character at a time.
     [any-char (token 'CHAR-TOK lexeme)]))
  (define (next-token)
    (jsonic-lexer port))
  next-token)
(provide make-tokenizer)
copy to clipboard
← prev next →