Fix missing database driver, enhance message template, dockerize application
parent bfd95f28c7
commit a31cbcdf01

@@ -0,0 +1,6 @@
.dockerignore
.git*
Dockerfile
data
docker-compose.yml
matrix-prometheus

@@ -20,3 +20,5 @@

# Go workspace file
go.work

/data/

@@ -0,0 +1,27 @@
FROM golang:1.19 AS builder

ARG DEBIAN_FRONTEND=noninteractive

RUN apt-get update && apt-get install -y libolm-dev

WORKDIR /build

COPY . .
RUN go build -ldflags="-s -w" -v ./cmd/matrix-prometheus

FROM debian:bullseye-slim

ARG DEBIAN_FRONTEND=noninteractive

RUN groupadd --gid 999 --system matrix-prometheus && useradd --gid 999 --system --uid 999 matrix-prometheus

RUN apt-get update && apt-get install -y ca-certificates libolm3

COPY --from=builder /build/matrix-prometheus /usr/local/bin/matrix-prometheus

USER matrix-prometheus

VOLUME /data
WORKDIR /data

CMD ["/usr/local/bin/matrix-prometheus"]

@@ -9,6 +9,8 @@ import (
	"sync"
	"syscall"

	_ "github.com/mattn/go-sqlite3"

	"git.luj0ga.de/franconian/matrix"
	"git.luj0ga.de/luca/matrix-prometheus/internal/config"
	"git.luj0ga.de/luca/matrix-prometheus/internal/webhook"
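The blank import above is what the commit title calls the missing database driver: go-sqlite3 registers itself with `database/sql` under the driver name `sqlite3` in its `init` function, so the package has to be imported even though nothing from it is referenced directly. A minimal sketch of what that registration enables (the path is illustrative and only mirrors the `DATABASE` value in the compose file below):

```go
package main

import (
	"database/sql"
	"log"

	_ "github.com/mattn/go-sqlite3" // side effect: registers the "sqlite3" driver
)

func main() {
	// Without the blank import, sql.Open would fail with "sql: unknown driver".
	db, err := sql.Open("sqlite3", "/data/db.sqlite3")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// sql.Open is lazy; Ping forces a real connection (and creates the file).
	if err := db.Ping(); err != nil {
		log.Fatal(err)
	}
}
```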

@@ -0,0 +1,19 @@
---

services:
  bot:
    build: .
    image: matrix-prometheus
    restart: unless-stopped
    environment:
      ALLOWED_ROOMS:
      DATABASE: /data/db.sqlite3
      DEVICE_NAME: $HOSTNAME
      HOMESERVER: 'https://matrix.org'
      PASSWORD:
      PICKLE_KEY: 1n5ecure-key-pl34se-change
      USER_ID:
    ports:
      - 127.0.0.1:8000:8000
    volumes:
      - ./data:/data

go.mod
@@ -2,7 +2,11 @@ module git.luj0ga.de/luca/matrix-prometheus

go 1.19

require git.luj0ga.de/franconian/matrix v0.0.0-20230113141720-688cc670ca7d
require (
	git.luj0ga.de/franconian/matrix v0.0.0-20230113141720-688cc670ca7d
	github.com/mattn/go-sqlite3 v1.14.13
	maunium.net/go/mautrix v0.11.0
)

require (
	github.com/gorilla/mux v1.8.0 // indirect

@@ -11,9 +15,9 @@ require (
	github.com/tidwall/match v1.1.1 // indirect
	github.com/tidwall/pretty v1.2.0 // indirect
	github.com/tidwall/sjson v1.2.4 // indirect
	github.com/yuin/goldmark v1.4.12 // indirect
	golang.org/x/crypto v0.0.0-20220513210258-46612604a0f9 // indirect
	golang.org/x/net v0.0.0-20220513224357-95641704303c // indirect
	gopkg.in/yaml.v2 v2.4.0 // indirect
	maunium.net/go/maulogger/v2 v2.3.2 // indirect
	maunium.net/go/mautrix v0.11.0 // indirect
)

go.sum
@@ -6,6 +6,7 @@ github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB7
github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/mattn/go-sqlite3 v1.14.13 h1:1tj15ngiFfcZzii7yd82foL+ks+ouQcj8j/TPq3fk1I=
github.com/mattn/go-sqlite3 v1.14.13/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/stretchr/testify v1.7.1 h1:5TQK59W5E3v0r2duFAb7P95B6hEeOyEnHRa8MjYSMTY=
github.com/tidwall/gjson v1.12.1/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=

@@ -17,6 +18,8 @@ github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs=
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/sjson v1.2.4 h1:cuiLzLnaMeBhRmEv00Lpk3tkYrcxpmbU81tAY4Dw0tc=
github.com/tidwall/sjson v1.2.4/go.mod h1:098SZ494YoMWPmMO6ct4dcFnqxwj9r/gF0Etp19pSNM=
github.com/yuin/goldmark v1.4.12 h1:6hffw6vALvEDqJ19dOJvJKOoAOKe4NDaTqvd2sktGN0=
github.com/yuin/goldmark v1.4.12/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
golang.org/x/crypto v0.0.0-20220513210258-46612604a0f9 h1:NUzdAbFtCJSXU20AOXgeqaUwg8Ypg4MPYmL+d+rsB5c=
golang.org/x/crypto v0.0.0-20220513210258-46612604a0f9/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/net v0.0.0-20220513224357-95641704303c h1:nF9mHSvoKBLkQNQhJZNsc66z2UzAMUbLGjC95CF3pU0=

@@ -6,12 +6,18 @@ import (
	"log"
	"net/http"

	"maunium.net/go/mautrix/event"
	"maunium.net/go/mautrix/format"
)

const (
	messageTemplate = `@room Alerts for group "{{ .GroupKey }}":{{ range .Alerts }}
* {{ .Annotations.Description }}{{ end }}`
	messageTemplate = "\U0001f6a8" +
		` **Alerts for group "_{{ .GroupKey }}_":**{{ range .Alerts }}
` + "\u2022" + ` {{ with .Labels.severity }}{{ if eq . "critical" }}` + "\U0001f525" +
		`{{ else if eq . "warning" }}` + "\U0001f4e2" +
		`{{ else }}` + "\U0001f514" +
		`{{ end }}{{ else }}` + "\U0001f514" +
		`{{ end }} {{ .Annotations.description }}{{ end }}
@room`
)

func (s Server) handleWebhook(w http.ResponseWriter, r *http.Request) {

@@ -47,10 +53,7 @@ func (s Server) handleWebhook(w http.ResponseWriter, r *http.Request) {
		return
	}

	evt := event.MessageEventContent{
		MsgType: event.MsgText,
		Body:    message.String(),
	}
	evt := format.RenderMarkdown(message.String(), true, false)

	success := s.matrix.Broadcast(&evt)
	if !success {
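The new template leads with a siren emoji and a bold group heading, prefixes each alert with a bullet and an emoji chosen from its `severity` label (fire for critical, loudspeaker for warning, bell otherwise or when the label is missing), and ends with an `@room` mention; the resulting Markdown is then turned into a formatted Matrix event by `format.RenderMarkdown` instead of being sent as plain text. A self-contained sketch of how the template expands; the payload struct here is only a simplified stand-in for the Alertmanager webhook fields the template references:

```go
package main

import (
	"os"
	"text/template"
)

// Simplified stand-ins for the webhook payload fields used by the template.
type alert struct {
	Labels      map[string]string
	Annotations map[string]string
}

type payload struct {
	GroupKey string
	Alerts   []alert
}

const messageTemplate = "\U0001f6a8" +
	` **Alerts for group "_{{ .GroupKey }}_":**{{ range .Alerts }}
` + "\u2022" + ` {{ with .Labels.severity }}{{ if eq . "critical" }}` + "\U0001f525" +
	`{{ else if eq . "warning" }}` + "\U0001f4e2" +
	`{{ else }}` + "\U0001f514" +
	`{{ end }}{{ else }}` + "\U0001f514" +
	`{{ end }} {{ .Annotations.description }}{{ end }}
@room`

func main() {
	tmpl := template.Must(template.New("message").Parse(messageTemplate))
	p := payload{
		GroupKey: `{}:{alertname="HostDown"}`,
		Alerts: []alert{{
			Labels:      map[string]string{"severity": "critical"},
			Annotations: map[string]string{"description": "node1 is unreachable"},
		}},
	}
	// Prints the Markdown body that would be handed to format.RenderMarkdown.
	if err := tmpl.Execute(os.Stdout, p); err != nil {
		panic(err)
	}
}
```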

@@ -0,0 +1,4 @@
coverage:
  status:
    project: off
    patch: off

@@ -0,0 +1,14 @@
*.db
*.exe
*.dll
*.o

# VSCode
.vscode

# Exclude from upgrade
upgrade/*.c
upgrade/*.h

# Exclude upgrade binary
upgrade/upgrade

@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2014 Yasuhiro Matsumoto

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -0,0 +1,591 @@
|
|||
go-sqlite3
|
||||
==========
|
||||
|
||||
[![GoDoc Reference](https://godoc.org/github.com/mattn/go-sqlite3?status.svg)](http://godoc.org/github.com/mattn/go-sqlite3)
|
||||
[![GitHub Actions](https://github.com/mattn/go-sqlite3/workflows/Go/badge.svg)](https://github.com/mattn/go-sqlite3/actions?query=workflow%3AGo)
|
||||
[![Financial Contributors on Open Collective](https://opencollective.com/mattn-go-sqlite3/all/badge.svg?label=financial+contributors)](https://opencollective.com/mattn-go-sqlite3)
|
||||
[![codecov](https://codecov.io/gh/mattn/go-sqlite3/branch/master/graph/badge.svg)](https://codecov.io/gh/mattn/go-sqlite3)
|
||||
[![Go Report Card](https://goreportcard.com/badge/github.com/mattn/go-sqlite3)](https://goreportcard.com/report/github.com/mattn/go-sqlite3)
|
||||
|
||||
Latest stable version is v1.14 or later, not v2.
|
||||
|
||||
~~**NOTE:** The increase to v2 was an accident. There were no major changes or features.~~
|
||||
|
||||
# Description
|
||||
|
||||
A sqlite3 driver that conforms to the built-in database/sql interface.
|
||||
|
||||
Supported Golang version: See [.github/workflows/go.yaml](./.github/workflows/go.yaml).
|
||||
|
||||
This package follows the official [Golang Release Policy](https://golang.org/doc/devel/release.html#policy).
|
||||
|
||||
### Overview
|
||||
|
||||
- [go-sqlite3](#go-sqlite3)
|
||||
- [Description](#description)
|
||||
- [Overview](#overview)
|
||||
- [Installation](#installation)
|
||||
- [API Reference](#api-reference)
|
||||
- [Connection String](#connection-string)
|
||||
- [DSN Examples](#dsn-examples)
|
||||
- [Features](#features)
|
||||
- [Usage](#usage)
|
||||
- [Feature / Extension List](#feature--extension-list)
|
||||
- [Compilation](#compilation)
|
||||
- [Android](#android)
|
||||
- [ARM](#arm)
|
||||
- [Cross Compile](#cross-compile)
|
||||
- [Google Cloud Platform](#google-cloud-platform)
|
||||
- [Linux](#linux)
|
||||
- [Alpine](#alpine)
|
||||
- [Fedora](#fedora)
|
||||
- [Ubuntu](#ubuntu)
|
||||
- [Mac OSX](#mac-osx)
|
||||
- [Windows](#windows)
|
||||
- [Errors](#errors)
|
||||
- [User Authentication](#user-authentication)
|
||||
- [Compile](#compile)
|
||||
- [Usage](#usage-1)
|
||||
- [Create protected database](#create-protected-database)
|
||||
- [Password Encoding](#password-encoding)
|
||||
- [Available Encoders](#available-encoders)
|
||||
- [Restrictions](#restrictions)
|
||||
- [Support](#support)
|
||||
- [User Management](#user-management)
|
||||
- [SQL](#sql)
|
||||
- [Examples](#examples)
|
||||
- [*SQLiteConn](#sqliteconn)
|
||||
- [Attached database](#attached-database)
|
||||
- [Extensions](#extensions)
|
||||
- [Spatialite](#spatialite)
|
||||
- [FAQ](#faq)
|
||||
- [License](#license)
|
||||
- [Author](#author)
|
||||
|
||||
# Installation
|
||||
|
||||
This package can be installed with the `go get` command:
|
||||
|
||||
go get github.com/mattn/go-sqlite3
|
||||
|
||||
_go-sqlite3_ is a *cgo* package.
|
||||
If you want to build your app using go-sqlite3, you need gcc.
|
||||
However, after you have built and installed _go-sqlite3_ with `go install github.com/mattn/go-sqlite3` (which requires gcc), you can build your app without relying on gcc in future.
|
||||
|
||||
***Important: because this is a `CGO` enabled package, you are required to set the environment variable `CGO_ENABLED=1` and have a `gcc` compiler present within your path.***
|
||||
|
||||
# API Reference
|
||||
|
||||
API documentation can be found [here](http://godoc.org/github.com/mattn/go-sqlite3).
|
||||
|
||||
Examples can be found under the [examples](./_example) directory.
|
||||
|
||||
# Connection String
|
||||
|
||||
When creating a new SQLite database or connection to an existing one, with the file name additional options can be given.
|
||||
This is also known as a DSN (Data Source Name) string.
|
||||
|
||||
Options are appended after the filename of the SQLite database.
|
||||
The database filename and options are separated by an `?` (Question Mark).
|
||||
Options should be URL-encoded (see [url.QueryEscape](https://golang.org/pkg/net/url/#QueryEscape)).
|
||||
|
||||
This also applies when using an in-memory database instead of a file.
|
||||
|
||||
Options can be given using the following format: `KEYWORD=VALUE` and multiple options can be combined with the `&` ampersand.
|
||||
|
||||
This library supports DSN options of SQLite itself and provides additional options.
|
||||
|
||||
Boolean values can be one of:
|
||||
* `0` `no` `false` `off`
|
||||
* `1` `yes` `true` `on`
|
||||
|
||||
| Name | Key | Value(s) | Description |
|
||||
|------|-----|----------|-------------|
|
||||
| UA - Create | `_auth` | - | Create User Authentication, for more information see [User Authentication](#user-authentication) |
|
||||
| UA - Username | `_auth_user` | `string` | Username for User Authentication, for more information see [User Authentication](#user-authentication) |
|
||||
| UA - Password | `_auth_pass` | `string` | Password for User Authentication, for more information see [User Authentication](#user-authentication) |
|
||||
| UA - Crypt | `_auth_crypt` | <ul><li>SHA1</li><li>SSHA1</li><li>SHA256</li><li>SSHA256</li><li>SHA384</li><li>SSHA384</li><li>SHA512</li><li>SSHA512</li></ul> | Password encoder to use for User Authentication, for more information see [User Authentication](#user-authentication) |
|
||||
| UA - Salt | `_auth_salt` | `string` | Salt to use if the configure password encoder requires a salt, for User Authentication, for more information see [User Authentication](#user-authentication) |
|
||||
| Auto Vacuum | `_auto_vacuum` \| `_vacuum` | <ul><li>`0` \| `none`</li><li>`1` \| `full`</li><li>`2` \| `incremental`</li></ul> | For more information see [PRAGMA auto_vacuum](https://www.sqlite.org/pragma.html#pragma_auto_vacuum) |
|
||||
| Busy Timeout | `_busy_timeout` \| `_timeout` | `int` | Specify value for sqlite3_busy_timeout. For more information see [PRAGMA busy_timeout](https://www.sqlite.org/pragma.html#pragma_busy_timeout) |
|
||||
| Case Sensitive LIKE | `_case_sensitive_like` \| `_cslike` | `boolean` | For more information see [PRAGMA case_sensitive_like](https://www.sqlite.org/pragma.html#pragma_case_sensitive_like) |
|
||||
| Defer Foreign Keys | `_defer_foreign_keys` \| `_defer_fk` | `boolean` | For more information see [PRAGMA defer_foreign_keys](https://www.sqlite.org/pragma.html#pragma_defer_foreign_keys) |
|
||||
| Foreign Keys | `_foreign_keys` \| `_fk` | `boolean` | For more information see [PRAGMA foreign_keys](https://www.sqlite.org/pragma.html#pragma_foreign_keys) |
|
||||
| Ignore CHECK Constraints | `_ignore_check_constraints` | `boolean` | For more information see [PRAGMA ignore_check_constraints](https://www.sqlite.org/pragma.html#pragma_ignore_check_constraints) |
|
||||
| Immutable | `immutable` | `boolean` | For more information see [Immutable](https://www.sqlite.org/c3ref/open.html) |
|
||||
| Journal Mode | `_journal_mode` \| `_journal` | <ul><li>DELETE</li><li>TRUNCATE</li><li>PERSIST</li><li>MEMORY</li><li>WAL</li><li>OFF</li></ul> | For more information see [PRAGMA journal_mode](https://www.sqlite.org/pragma.html#pragma_journal_mode) |
|
||||
| Locking Mode | `_locking_mode` \| `_locking` | <ul><li>NORMAL</li><li>EXCLUSIVE</li></ul> | For more information see [PRAGMA locking_mode](https://www.sqlite.org/pragma.html#pragma_locking_mode) |
|
||||
| Mode | `mode` | <ul><li>ro</li><li>rw</li><li>rwc</li><li>memory</li></ul> | Access Mode of the database. For more information see [SQLite Open](https://www.sqlite.org/c3ref/open.html) |
|
||||
| Mutex Locking | `_mutex` | <ul><li>no</li><li>full</li></ul> | Specify mutex mode. |
|
||||
| Query Only | `_query_only` | `boolean` | For more information see [PRAGMA query_only](https://www.sqlite.org/pragma.html#pragma_query_only) |
|
||||
| Recursive Triggers | `_recursive_triggers` \| `_rt` | `boolean` | For more information see [PRAGMA recursive_triggers](https://www.sqlite.org/pragma.html#pragma_recursive_triggers) |
|
||||
| Secure Delete | `_secure_delete` | `boolean` \| `FAST` | For more information see [PRAGMA secure_delete](https://www.sqlite.org/pragma.html#pragma_secure_delete) |
|
||||
| Shared-Cache Mode | `cache` | <ul><li>shared</li><li>private</li></ul> | Set cache mode for more information see [sqlite.org](https://www.sqlite.org/sharedcache.html) |
|
||||
| Synchronous | `_synchronous` \| `_sync` | <ul><li>0 \| OFF</li><li>1 \| NORMAL</li><li>2 \| FULL</li><li>3 \| EXTRA</li></ul> | For more information see [PRAGMA synchronous](https://www.sqlite.org/pragma.html#pragma_synchronous) |
|
||||
| Time Zone Location | `_loc` | auto | Specify location of time format. |
|
||||
| Transaction Lock | `_txlock` | <ul><li>immediate</li><li>deferred</li><li>exclusive</li></ul> | Specify locking behavior for transactions. |
|
||||
| Writable Schema | `_writable_schema` | `Boolean` | When this pragma is on, the SQLITE_MASTER tables in which database can be changed using ordinary UPDATE, INSERT, and DELETE statements. Warning: misuse of this pragma can easily result in a corrupt database file. |
|
||||
| Cache Size | `_cache_size` | `int` | Maximum cache size; default is 2000K (2M). See [PRAGMA cache_size](https://sqlite.org/pragma.html#pragma_cache_size) |
|
||||
|
||||
|
||||
## DSN Examples
|
||||
|
||||
```
|
||||
file:test.db?cache=shared&mode=memory
|
||||
```
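
As a concrete (non-normative) illustration of the format described above, a DSN combining a few options from the table can be passed straight to `database/sql`; the file name and the particular options are arbitrary:

```go
package main

import (
	"database/sql"
	"log"

	_ "github.com/mattn/go-sqlite3"
)

func main() {
	// Options are appended after the file name with '?' and combined with '&'.
	dsn := "file:test.db?_busy_timeout=5000&_journal_mode=WAL&_foreign_keys=on"

	db, err := sql.Open("sqlite3", dsn)
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	if err := db.Ping(); err != nil {
		log.Fatal(err)
	}
}
```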
|
||||
|
||||
# Features
|
||||
|
||||
This package allows additional configuration of features available within SQLite3 to be enabled or disabled by golang build constraints also known as build `tags`.
|
||||
|
||||
Click [here](https://golang.org/pkg/go/build/#hdr-Build_Constraints) for more information about build tags / constraints.
|
||||
|
||||
### Usage
|
||||
|
||||
If you wish to build this library with additional extensions / features, use the following command:
|
||||
|
||||
```bash
|
||||
go build --tags "<FEATURE>"
|
||||
```
|
||||
|
||||
For available features, see the extension list.
|
||||
When using multiple build tags, all the different tags should be space delimited.
|
||||
|
||||
Example:
|
||||
|
||||
```bash
|
||||
go build --tags "icu json1 fts5 secure_delete"
|
||||
```
|
||||
|
||||
### Feature / Extension List
|
||||
|
||||
| Extension | Build Tag | Description |
|
||||
|-----------|-----------|-------------|
|
||||
| Additional Statistics | sqlite_stat4 | This option adds additional logic to the ANALYZE command and to the query planner that can help SQLite to chose a better query plan under certain situations. The ANALYZE command is enhanced to collect histogram data from all columns of every index and store that data in the sqlite_stat4 table.<br><br>The query planner will then use the histogram data to help it make better index choices. The downside of this compile-time option is that it violates the query planner stability guarantee making it more difficult to ensure consistent performance in mass-produced applications.<br><br>SQLITE_ENABLE_STAT4 is an enhancement of SQLITE_ENABLE_STAT3. STAT3 only recorded histogram data for the left-most column of each index whereas the STAT4 enhancement records histogram data from all columns of each index.<br><br>The SQLITE_ENABLE_STAT3 compile-time option is a no-op and is ignored if the SQLITE_ENABLE_STAT4 compile-time option is used |
|
||||
| Allow URI Authority | sqlite_allow_uri_authority | URI filenames normally throws an error if the authority section is not either empty or "localhost".<br><br>However, if SQLite is compiled with the SQLITE_ALLOW_URI_AUTHORITY compile-time option, then the URI is converted into a Uniform Naming Convention (UNC) filename and passed down to the underlying operating system that way |
|
||||
| App Armor | sqlite_app_armor | When defined, this C-preprocessor macro activates extra code that attempts to detect misuse of the SQLite API, such as passing in NULL pointers to required parameters or using objects after they have been destroyed. <br><br>App Armor is not available under `Windows`. |
|
||||
| Disable Load Extensions | sqlite_omit_load_extension | Loading of external extensions is enabled by default.<br><br>To disable extension loading add the build tag `sqlite_omit_load_extension`. |
|
||||
| Foreign Keys | sqlite_foreign_keys | This macro determines whether enforcement of foreign key constraints is enabled or disabled by default for new database connections.<br><br>Each database connection can always turn enforcement of foreign key constraints on and off and run-time using the foreign_keys pragma.<br><br>Enforcement of foreign key constraints is normally off by default, but if this compile-time parameter is set to 1, enforcement of foreign key constraints will be on by default |
|
||||
| Full Auto Vacuum | sqlite_vacuum_full | Set the default auto vacuum to full |
|
||||
| Incremental Auto Vacuum | sqlite_vacuum_incr | Set the default auto vacuum to incremental |
|
||||
| Full Text Search Engine | sqlite_fts5 | When this option is defined in the amalgamation, versions 5 of the full-text search engine (fts5) is added to the build automatically |
|
||||
| International Components for Unicode | sqlite_icu | This option causes the International Components for Unicode or "ICU" extension to SQLite to be added to the build |
|
||||
| Introspect PRAGMAS | sqlite_introspect | This option adds some extra PRAGMA statements. <ul><li>PRAGMA function_list</li><li>PRAGMA module_list</li><li>PRAGMA pragma_list</li></ul> |
|
||||
| JSON SQL Functions | sqlite_json | When this option is defined in the amalgamation, the JSON SQL functions are added to the build automatically |
|
||||
| Pre Update Hook | sqlite_preupdate_hook | Registers a callback function that is invoked prior to each INSERT, UPDATE, and DELETE operation on a database table. |
|
||||
| Secure Delete | sqlite_secure_delete | This compile-time option changes the default setting of the secure_delete pragma.<br><br>When this option is not used, secure_delete defaults to off. When this option is present, secure_delete defaults to on.<br><br>The secure_delete setting causes deleted content to be overwritten with zeros. There is a small performance penalty since additional I/O must occur.<br><br>On the other hand, secure_delete can prevent fragments of sensitive information from lingering in unused parts of the database file after it has been deleted. See the documentation on the secure_delete pragma for additional information |
|
||||
| Secure Delete (FAST) | sqlite_secure_delete_fast | For more information see [PRAGMA secure_delete](https://www.sqlite.org/pragma.html#pragma_secure_delete) |
|
||||
| Tracing / Debug | sqlite_trace | Activate trace functions |
|
||||
| User Authentication | sqlite_userauth | SQLite User Authentication see [User Authentication](#user-authentication) for more information. |
|
||||
|
||||
# Compilation
|
||||
|
||||
This package requires the `CGO_ENABLED=1` environment variable (if it is not already set by default) and the presence of the `gcc` compiler.
|
||||
|
||||
If you need to add additional CFLAGS or LDFLAGS to the build command, and do not want to modify this package, then this can be achieved by using the `CGO_CFLAGS` and `CGO_LDFLAGS` environment variables.
|
||||
|
||||
## Android
|
||||
|
||||
This package can be compiled for android.
|
||||
Compile with:
|
||||
|
||||
```bash
|
||||
go build --tags "android"
|
||||
```
|
||||
|
||||
For more information see [#201](https://github.com/mattn/go-sqlite3/issues/201)
|
||||
|
||||
# ARM
|
||||
|
||||
To compile for `ARM` use the following environment:
|
||||
|
||||
```bash
|
||||
env CC=arm-linux-gnueabihf-gcc CXX=arm-linux-gnueabihf-g++ \
|
||||
CGO_ENABLED=1 GOOS=linux GOARCH=arm GOARM=7 \
|
||||
go build -v
|
||||
```
|
||||
|
||||
Additional information:
|
||||
- [#242](https://github.com/mattn/go-sqlite3/issues/242)
|
||||
- [#504](https://github.com/mattn/go-sqlite3/issues/504)
|
||||
|
||||
# Cross Compile
|
||||
|
||||
This library can be cross-compiled.
|
||||
|
||||
In some cases you are required to set the `CC` environment variable to the cross compiler.
|
||||
|
||||
## Cross Compiling from MAC OSX
|
||||
The simplest way to cross compile from OSX is to use [xgo](https://github.com/karalabe/xgo).
|
||||
|
||||
Steps:
|
||||
- Install [xgo](https://github.com/karalabe/xgo) (`go get github.com/karalabe/xgo`).
|
||||
- Ensure that your project is within your `GOPATH`.
|
||||
- Run `xgo local/path/to/project`.
|
||||
|
||||
Please refer to the project's [README](https://github.com/karalabe/xgo/blob/master/README.md) for further information.
|
||||
|
||||
# Google Cloud Platform
|
||||
|
||||
Building on GCP is not possible because Google Cloud Platform does not allow `gcc` to be executed.
|
||||
|
||||
Please work only with compiled final binaries.
|
||||
|
||||
## Linux
|
||||
|
||||
To compile this package on Linux, you must install the development tools for your linux distribution.
|
||||
|
||||
To compile under linux use the build tag `linux`.
|
||||
|
||||
```bash
|
||||
go build --tags "linux"
|
||||
```
|
||||
|
||||
If you wish to link directly to libsqlite3 then you can use the `libsqlite3` build tag.
|
||||
|
||||
```
|
||||
go build --tags "libsqlite3 linux"
|
||||
```
|
||||
|
||||
### Alpine
|
||||
|
||||
When building in an `alpine` container run the following command before building:
|
||||
|
||||
```
|
||||
apk add --update gcc musl-dev
|
||||
```
|
||||
|
||||
### Fedora
|
||||
|
||||
```bash
|
||||
sudo yum groupinstall "Development Tools" "Development Libraries"
|
||||
```
|
||||
|
||||
### Ubuntu
|
||||
|
||||
```bash
|
||||
sudo apt-get install build-essential
|
||||
```
|
||||
|
||||
## Mac OSX
|
||||
|
||||
OSX should have all the tools present to compile this package. If not, install Xcode to add all the developer tools.
|
||||
|
||||
Required dependency:
|
||||
|
||||
```bash
|
||||
brew install sqlite3
|
||||
```
|
||||
|
||||
For OSX, there is an additional package to install which is required if you wish to build the `icu` extension.
|
||||
|
||||
This additional package can be installed with `homebrew`:
|
||||
|
||||
```bash
|
||||
brew upgrade icu4c
|
||||
```
|
||||
|
||||
To compile for Mac OSX:
|
||||
|
||||
```bash
|
||||
go build --tags "darwin"
|
||||
```
|
||||
|
||||
If you wish to link directly to libsqlite3, use the `libsqlite3` build tag:
|
||||
|
||||
```
|
||||
go build --tags "libsqlite3 darwin"
|
||||
```
|
||||
|
||||
Additional information:
|
||||
- [#206](https://github.com/mattn/go-sqlite3/issues/206)
|
||||
- [#404](https://github.com/mattn/go-sqlite3/issues/404)
|
||||
|
||||
## Windows
|
||||
|
||||
To compile this package on Windows, you must have the `gcc` compiler installed.
|
||||
|
||||
1) Install a Windows `gcc` toolchain.
|
||||
2) Add the `bin` folder to the Windows path, if the installer did not do this by default.
|
||||
3) Open a terminal for the TDM-GCC toolchain, which can be found in the Windows Start menu.
|
||||
4) Navigate to your project folder and run the `go build ...` command for this package.
|
||||
|
||||
For example the TDM-GCC Toolchain can be found [here](https://jmeubank.github.io/tdm-gcc/).
|
||||
|
||||
## Errors
|
||||
|
||||
- Compile error: `can not be used when making a shared object; recompile with -fPIC`
|
||||
|
||||
When receiving a compile-time error referencing `recompile with -fPIC`, you are probably using a hardened system.
|
||||
|
||||
You can compile the library on a hardened system with the following command.
|
||||
|
||||
```bash
|
||||
go build -ldflags '-extldflags=-fno-PIC'
|
||||
```
|
||||
|
||||
More details see [#120](https://github.com/mattn/go-sqlite3/issues/120)
|
||||
|
||||
- Can't build go-sqlite3 on windows 64bit.
|
||||
|
||||
> Probably, you are using go 1.0, go1.0 has a problem when it comes to compiling/linking on windows 64bit.
|
||||
> See: [#27](https://github.com/mattn/go-sqlite3/issues/27)
|
||||
|
||||
- `go get github.com/mattn/go-sqlite3` throws compilation error.
|
||||
|
||||
`gcc` throws: `internal compiler error`
|
||||
|
||||
Remove the download repository from your disk and try re-install with:
|
||||
|
||||
```bash
|
||||
go install github.com/mattn/go-sqlite3
|
||||
```
|
||||
|
||||
# User Authentication
|
||||
|
||||
This package supports the SQLite User Authentication module.
|
||||
|
||||
## Compile
|
||||
|
||||
To use the User authentication module, the package has to be compiled with the tag `sqlite_userauth`. See [Features](#features).
|
||||
|
||||
## Usage
|
||||
|
||||
### Create protected database
|
||||
|
||||
To create a database protected by user authentication, provide the following argument to the connection string `_auth`.
|
||||
This will enable user authentication within the database. This option however requires two additional arguments:
|
||||
|
||||
- `_auth_user`
|
||||
- `_auth_pass`
|
||||
|
||||
When `_auth` is present in the connection string user authentication will be enabled and the provided user will be created
|
||||
as an `admin` user. After initial creation, the parameter `_auth` has no effect anymore and can be omitted from the connection string.
|
||||
|
||||
Example connection strings:
|
||||
|
||||
Create an user authentication database with user `admin` and password `admin`:
|
||||
|
||||
`file:test.s3db?_auth&_auth_user=admin&_auth_pass=admin`
|
||||
|
||||
Create an user authentication database with user `admin` and password `admin` and use `SHA1` for the password encoding:
|
||||
|
||||
`file:test.s3db?_auth&_auth_user=admin&_auth_pass=admin&_auth_crypt=sha1`
|
||||
|
||||
### Password Encoding
|
||||
|
||||
The passwords within the user authentication module of SQLite are encoded with the SQLite function `sqlite_crypt`.
|
||||
This function uses a Caesar cipher, which is quite insecure.
|
||||
This library provides several additional password encoders which can be configured through the connection string.
|
||||
|
||||
The password cipher can be configured with the key `_auth_crypt`. If the configured password encoder also requires a salt, it can be configured with `_auth_salt`.
|
||||
|
||||
#### Available Encoders
|
||||
|
||||
- SHA1
|
||||
- SSHA1 (Salted SHA1)
|
||||
- SHA256
|
||||
- SSHA256 (salted SHA256)
|
||||
- SHA384
|
||||
- SSHA384 (salted SHA384)
|
||||
- SHA512
|
||||
- SSHA512 (salted SHA512)
|
||||
|
||||
### Restrictions
|
||||
|
||||
Operations on the database regarding user management can only be performed by an administrator user.
|
||||
|
||||
### Support
|
||||
|
||||
The user authentication supports two kinds of users:
|
||||
|
||||
- administrators
|
||||
- regular users
|
||||
|
||||
### User Management
|
||||
|
||||
User management can be done by directly using the `*SQLiteConn` or by SQL.
|
||||
|
||||
#### SQL
|
||||
|
||||
The following sql functions are available for user management:
|
||||
|
||||
| Function | Arguments | Description |
|
||||
|----------|-----------|-------------|
|
||||
| `authenticate` | username `string`, password `string` | Will authenticate a user; this is done by the connection and should not be used manually. |
|
||||
| `auth_user_add` | username `string`, password `string`, admin `int` | This function will add a user to the database.<br>If the database is not protected by user authentication, it will enable it. Argument `admin` is an integer identifying whether the added user should be an administrator. Only administrators can add administrators. |
|
||||
| `auth_user_change` | username `string`, password `string`, admin `int` | Function to modify a user. Users can change their own password, but only an administrator can change the administrator flag. |
|
||||
| `authUserDelete` | username `string` | Delete a user from the database. Can only be used by an administrator. The currently logged-in administrator cannot be deleted. This is to make sure there is always an administrator remaining. |
|
||||
|
||||
These functions will return an integer:
|
||||
|
||||
- 0 (SQLITE_OK)
|
||||
- 23 (SQLITE_AUTH) Failed to perform due to authentication or insufficient privileges
|
||||
|
||||
##### Examples
|
||||
|
||||
```sql
|
||||
// Authenticate user
|
||||
// Create Admin User
|
||||
SELECT auth_user_add('admin2', 'admin2', 1);
|
||||
|
||||
// Change password for user
|
||||
SELECT auth_user_change('user', 'userpassword', 0);
|
||||
|
||||
// Delete user
|
||||
SELECT user_delete('user');
|
||||
```
|
||||
|
||||
#### *SQLiteConn
|
||||
|
||||
The following functions are available for User authentication from the `*SQLiteConn`:
|
||||
|
||||
| Function | Description |
|
||||
|----------|-------------|
|
||||
| `Authenticate(username, password string) error` | Authenticate user |
|
||||
| `AuthUserAdd(username, password string, admin bool) error` | Add user |
|
||||
| `AuthUserChange(username, password string, admin bool) error` | Modify user |
|
||||
| `AuthUserDelete(username string) error` | Delete user |
|
||||
|
||||
### Attached database
|
||||
|
||||
When using attached databases, SQLite will use the authentication from the `main` database for the attached database(s).
|
||||
|
||||
# Extensions
|
||||
|
||||
If you want your own extension to be listed here, or you want to add a reference to an extension; please submit an Issue for this.
|
||||
|
||||
## Spatialite
|
||||
|
||||
Spatialite is available as an extension to SQLite, and can be used in combination with this repository.
|
||||
For an example, see [shaxbee/go-spatialite](https://github.com/shaxbee/go-spatialite).
|
||||
|
||||
## extension-functions.c from SQLite3 Contrib
|
||||
|
||||
extension-functions.c is available as an extension to SQLite, and provides the following functions:
|
||||
|
||||
- Math: acos, asin, atan, atn2, atan2, acosh, asinh, atanh, difference, degrees, radians, cos, sin, tan, cot, cosh, sinh, tanh, coth, exp, log, log10, power, sign, sqrt, square, ceil, floor, pi.
|
||||
- String: replicate, charindex, leftstr, rightstr, ltrim, rtrim, trim, replace, reverse, proper, padl, padr, padc, strfilter.
|
||||
- Aggregate: stdev, variance, mode, median, lower_quartile, upper_quartile
|
||||
|
||||
For an example, see [dinedal/go-sqlite3-extension-functions](https://github.com/dinedal/go-sqlite3-extension-functions).
|
||||
|
||||
# FAQ
|
||||
|
||||
- Getting insert error while query is opened.
|
||||
|
||||
> You can pass some arguments into the connection string, for example, a URI.
|
||||
> See: [#39](https://github.com/mattn/go-sqlite3/issues/39)
|
||||
|
||||
- Do you want to cross compile? mingw on Linux or Mac?
|
||||
|
||||
> See: [#106](https://github.com/mattn/go-sqlite3/issues/106)
|
||||
> See also: http://www.limitlessfx.com/cross-compile-golang-app-for-windows-from-linux.html
|
||||
|
||||
- Want to get time.Time with current locale
|
||||
|
||||
Use `_loc=auto` in SQLite3 filename schema like `file:foo.db?_loc=auto`.
|
||||
|
||||
- Can I use this in multiple routines concurrently?
|
||||
|
||||
Yes for readonly. But not for writable. See [#50](https://github.com/mattn/go-sqlite3/issues/50), [#51](https://github.com/mattn/go-sqlite3/issues/51), [#209](https://github.com/mattn/go-sqlite3/issues/209), [#274](https://github.com/mattn/go-sqlite3/issues/274).
|
||||
|
||||
- Why am I getting a `no such table` error?
|
||||
|
||||
Why is it racy if I use a `sql.Open("sqlite3", ":memory:")` database?
|
||||
|
||||
Each connection to `":memory:"` opens a brand new in-memory sql database, so if
|
||||
the stdlib's sql engine happens to open another connection and you've only
|
||||
specified `":memory:"`, that connection will see a brand new database. A
|
||||
workaround is to use `"file::memory:?cache=shared"` (or `"file:foobar?mode=memory&cache=shared"`). Every
|
||||
connection to this string will point to the same in-memory database.
|
||||
|
||||
Note that if the last database connection in the pool closes, the in-memory database is deleted. Make sure the [max idle connection limit](https://golang.org/pkg/database/sql/#DB.SetMaxIdleConns) is > 0, and the [connection lifetime](https://golang.org/pkg/database/sql/#DB.SetConnMaxLifetime) is infinite.
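
A short sketch of that workaround, combining the shared-cache DSN and the pool settings from the paragraph above (the table is only there to show the database surviving across connections):

```go
package main

import (
	"database/sql"
	"log"

	_ "github.com/mattn/go-sqlite3"
)

func main() {
	// Every pooled connection opened from this DSN sees the same in-memory database.
	db, err := sql.Open("sqlite3", "file::memory:?cache=shared")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Keep an idle connection around forever so the in-memory database is
	// never dropped when the pool momentarily empties.
	db.SetMaxIdleConns(1)
	db.SetConnMaxLifetime(0)

	if _, err := db.Exec(`CREATE TABLE t (id INTEGER PRIMARY KEY)`); err != nil {
		log.Fatal(err)
	}
}
```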
|
||||
|
||||
For more information see:
|
||||
* [#204](https://github.com/mattn/go-sqlite3/issues/204)
|
||||
* [#511](https://github.com/mattn/go-sqlite3/issues/511)
|
||||
* https://www.sqlite.org/sharedcache.html#shared_cache_and_in_memory_databases
|
||||
* https://www.sqlite.org/inmemorydb.html#sharedmemdb
|
||||
|
||||
- Reading from database with large amount of goroutines fails on OSX.
|
||||
|
||||
By default, OS X limits the whole system to at most 1000 simultaneously open files.
|
||||
|
||||
For more information, see [#289](https://github.com/mattn/go-sqlite3/issues/289)
|
||||
|
||||
- Trying to execute a `.` (dot) command throws an error.
|
||||
|
||||
Error: `Error: near ".": syntax error`
|
||||
Dot commands are part of the SQLite3 CLI, not of this library.
|
||||
|
||||
You need to implement the feature or call the sqlite3 cli.
|
||||
|
||||
More information see [#305](https://github.com/mattn/go-sqlite3/issues/305).
|
||||
|
||||
- Error: `database is locked`
|
||||
|
||||
When you get a `database is locked` error, please use the following options.
|
||||
|
||||
Add to DSN: `cache=shared`
|
||||
|
||||
Example:
|
||||
```go
|
||||
db, err := sql.Open("sqlite3", "file:locked.sqlite?cache=shared")
|
||||
```
|
||||
|
||||
Next, please set the database connections of the SQL package to 1:
|
||||
|
||||
```go
|
||||
db.SetMaxOpenConns(1)
|
||||
```
|
||||
|
||||
For more information, see [#209](https://github.com/mattn/go-sqlite3/issues/209).
|
||||
|
||||
## Contributors
|
||||
|
||||
### Code Contributors
|
||||
|
||||
This project exists thanks to all the people who [[contribute](CONTRIBUTING.md)].
|
||||
<a href="https://github.com/mattn/go-sqlite3/graphs/contributors"><img src="https://opencollective.com/mattn-go-sqlite3/contributors.svg?width=890&button=false" /></a>
|
||||
|
||||
### Financial Contributors
|
||||
|
||||
Become a financial contributor and help us sustain our community. [[Contribute here](https://opencollective.com/mattn-go-sqlite3/contribute)].
|
||||
|
||||
#### Individuals
|
||||
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3"><img src="https://opencollective.com/mattn-go-sqlite3/individuals.svg?width=890"></a>
|
||||
|
||||
#### Organizations
|
||||
|
||||
Support this project with your organization. Your logo will show up here with a link to your website. [[Contribute](https://opencollective.com/mattn-go-sqlite3/contribute)]
|
||||
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/0/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/0/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/1/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/1/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/2/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/2/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/3/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/3/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/4/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/4/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/5/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/5/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/6/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/6/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/7/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/7/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/8/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/8/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/mattn-go-sqlite3/organization/9/website"><img src="https://opencollective.com/mattn-go-sqlite3/organization/9/avatar.svg"></a>
|
||||
|
||||
# License
|
||||
|
||||
MIT: http://mattn.mit-license.org/2018
|
||||
|
||||
sqlite3-binding.c, sqlite3-binding.h, sqlite3ext.h
|
||||
|
||||
The -binding suffix was added to avoid build failures under gccgo.
|
||||
|
||||
In this repository, those files are an amalgamation of code that was copied from SQLite3. The license of that code is the same as the license of SQLite3.
|
||||
|
||||
# Author
|
||||
|
||||
Yasuhiro Matsumoto (a.k.a mattn)
|
||||
|
||||
G.J.R. Timmer
|
|
@@ -0,0 +1,85 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#ifndef USE_LIBSQLITE3
|
||||
#include "sqlite3-binding.h"
|
||||
#else
|
||||
#include <sqlite3.h>
|
||||
#endif
|
||||
#include <stdlib.h>
|
||||
*/
|
||||
import "C"
|
||||
import (
|
||||
"runtime"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// SQLiteBackup implement interface of Backup.
|
||||
type SQLiteBackup struct {
|
||||
b *C.sqlite3_backup
|
||||
}
|
||||
|
||||
// Backup make backup from src to dest.
|
||||
func (destConn *SQLiteConn) Backup(dest string, srcConn *SQLiteConn, src string) (*SQLiteBackup, error) {
|
||||
destptr := C.CString(dest)
|
||||
defer C.free(unsafe.Pointer(destptr))
|
||||
srcptr := C.CString(src)
|
||||
defer C.free(unsafe.Pointer(srcptr))
|
||||
|
||||
if b := C.sqlite3_backup_init(destConn.db, destptr, srcConn.db, srcptr); b != nil {
|
||||
bb := &SQLiteBackup{b: b}
|
||||
runtime.SetFinalizer(bb, (*SQLiteBackup).Finish)
|
||||
return bb, nil
|
||||
}
|
||||
return nil, destConn.lastError()
|
||||
}
|
||||
|
||||
// Step to backs up for one step. Calls the underlying `sqlite3_backup_step`
|
||||
// function. This function returns a boolean indicating if the backup is done
|
||||
// and an error signalling any other error. Done is returned if the underlying
|
||||
// C function returns SQLITE_DONE (Code 101)
|
||||
func (b *SQLiteBackup) Step(p int) (bool, error) {
|
||||
ret := C.sqlite3_backup_step(b.b, C.int(p))
|
||||
if ret == C.SQLITE_DONE {
|
||||
return true, nil
|
||||
} else if ret != 0 && ret != C.SQLITE_LOCKED && ret != C.SQLITE_BUSY {
|
||||
return false, Error{Code: ErrNo(ret)}
|
||||
}
|
||||
return false, nil
|
||||
}
|
||||
|
||||
// Remaining return whether have the rest for backup.
|
||||
func (b *SQLiteBackup) Remaining() int {
|
||||
return int(C.sqlite3_backup_remaining(b.b))
|
||||
}
|
||||
|
||||
// PageCount return count of pages.
|
||||
func (b *SQLiteBackup) PageCount() int {
|
||||
return int(C.sqlite3_backup_pagecount(b.b))
|
||||
}
|
||||
|
||||
// Finish close backup.
|
||||
func (b *SQLiteBackup) Finish() error {
|
||||
return b.Close()
|
||||
}
|
||||
|
||||
// Close close backup.
|
||||
func (b *SQLiteBackup) Close() error {
|
||||
ret := C.sqlite3_backup_finish(b.b)
|
||||
|
||||
// sqlite3_backup_finish() never fails, it just returns the
|
||||
// error code from previous operations, so clean up before
|
||||
// checking and returning an error
|
||||
b.b = nil
|
||||
runtime.SetFinalizer(b, nil)
|
||||
|
||||
if ret != 0 {
|
||||
return Error{Code: ErrNo(ret)}
|
||||
}
|
||||
return nil
|
||||
}
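
For context, a hedged sketch of how the backup API above can be driven from `database/sql`: capturing the raw `*SQLiteConn`s through the driver's `ConnectHook` is one common pattern, and the driver name, file names and schema name `"main"` below are illustrative:

```go
package main

import (
	"database/sql"
	"log"

	sqlite3 "github.com/mattn/go-sqlite3"
)

func main() {
	var conns []*sqlite3.SQLiteConn

	// Wrapper driver whose ConnectHook records every raw connection it opens.
	sql.Register("sqlite3_backup", &sqlite3.SQLiteDriver{
		ConnectHook: func(conn *sqlite3.SQLiteConn) error {
			conns = append(conns, conn)
			return nil
		},
	})

	src, err := sql.Open("sqlite3_backup", "./live.db")
	if err != nil {
		log.Fatal(err)
	}
	dst, err := sql.Open("sqlite3_backup", "./backup.db")
	if err != nil {
		log.Fatal(err)
	}

	// sql.Open is lazy; Ping forces one real connection per pool so the hook fires.
	if err := src.Ping(); err != nil {
		log.Fatal(err)
	}
	if err := dst.Ping(); err != nil {
		log.Fatal(err)
	}
	srcConn, dstConn := conns[0], conns[1]

	// "main" is the schema name of the primary database on both sides.
	bk, err := dstConn.Backup("main", srcConn, "main")
	if err != nil {
		log.Fatal(err)
	}
	defer bk.Finish()

	// A negative page count copies everything in a single step.
	if done, err := bk.Step(-1); err != nil || !done {
		log.Fatalf("backup incomplete: done=%v err=%v", done, err)
	}
}
```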
|
|
@@ -0,0 +1,392 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package sqlite3
|
||||
|
||||
// You can't export a Go function to C and have definitions in the C
|
||||
// preamble in the same file, so we have to have callbackTrampoline in
|
||||
// its own file. Because we need a separate file anyway, the support
|
||||
// code for SQLite custom functions is in here.
|
||||
|
||||
/*
|
||||
#ifndef USE_LIBSQLITE3
|
||||
#include "sqlite3-binding.h"
|
||||
#else
|
||||
#include <sqlite3.h>
|
||||
#endif
|
||||
#include <stdlib.h>
|
||||
|
||||
void _sqlite3_result_text(sqlite3_context* ctx, const char* s);
|
||||
void _sqlite3_result_blob(sqlite3_context* ctx, const void* b, int l);
|
||||
*/
|
||||
import "C"
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"math"
|
||||
"reflect"
|
||||
"sync"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
//export callbackTrampoline
|
||||
func callbackTrampoline(ctx *C.sqlite3_context, argc int, argv **C.sqlite3_value) {
|
||||
args := (*[(math.MaxInt32 - 1) / unsafe.Sizeof((*C.sqlite3_value)(nil))]*C.sqlite3_value)(unsafe.Pointer(argv))[:argc:argc]
|
||||
fi := lookupHandle(C.sqlite3_user_data(ctx)).(*functionInfo)
|
||||
fi.Call(ctx, args)
|
||||
}
|
||||
|
||||
//export stepTrampoline
|
||||
func stepTrampoline(ctx *C.sqlite3_context, argc C.int, argv **C.sqlite3_value) {
|
||||
args := (*[(math.MaxInt32 - 1) / unsafe.Sizeof((*C.sqlite3_value)(nil))]*C.sqlite3_value)(unsafe.Pointer(argv))[:int(argc):int(argc)]
|
||||
ai := lookupHandle(C.sqlite3_user_data(ctx)).(*aggInfo)
|
||||
ai.Step(ctx, args)
|
||||
}
|
||||
|
||||
//export doneTrampoline
|
||||
func doneTrampoline(ctx *C.sqlite3_context) {
|
||||
ai := lookupHandle(C.sqlite3_user_data(ctx)).(*aggInfo)
|
||||
ai.Done(ctx)
|
||||
}
|
||||
|
||||
//export compareTrampoline
|
||||
func compareTrampoline(handlePtr unsafe.Pointer, la C.int, a *C.char, lb C.int, b *C.char) C.int {
|
||||
cmp := lookupHandle(handlePtr).(func(string, string) int)
|
||||
return C.int(cmp(C.GoStringN(a, la), C.GoStringN(b, lb)))
|
||||
}
|
||||
|
||||
//export commitHookTrampoline
|
||||
func commitHookTrampoline(handle unsafe.Pointer) int {
|
||||
callback := lookupHandle(handle).(func() int)
|
||||
return callback()
|
||||
}
|
||||
|
||||
//export rollbackHookTrampoline
|
||||
func rollbackHookTrampoline(handle unsafe.Pointer) {
|
||||
callback := lookupHandle(handle).(func())
|
||||
callback()
|
||||
}
|
||||
|
||||
//export updateHookTrampoline
|
||||
func updateHookTrampoline(handle unsafe.Pointer, op int, db *C.char, table *C.char, rowid int64) {
|
||||
callback := lookupHandle(handle).(func(int, string, string, int64))
|
||||
callback(op, C.GoString(db), C.GoString(table), rowid)
|
||||
}
|
||||
|
||||
//export authorizerTrampoline
|
||||
func authorizerTrampoline(handle unsafe.Pointer, op int, arg1 *C.char, arg2 *C.char, arg3 *C.char) int {
|
||||
callback := lookupHandle(handle).(func(int, string, string, string) int)
|
||||
return callback(op, C.GoString(arg1), C.GoString(arg2), C.GoString(arg3))
|
||||
}
|
||||
|
||||
//export preUpdateHookTrampoline
|
||||
func preUpdateHookTrampoline(handle unsafe.Pointer, dbHandle uintptr, op int, db *C.char, table *C.char, oldrowid int64, newrowid int64) {
|
||||
hval := lookupHandleVal(handle)
|
||||
data := SQLitePreUpdateData{
|
||||
Conn: hval.db,
|
||||
Op: op,
|
||||
DatabaseName: C.GoString(db),
|
||||
TableName: C.GoString(table),
|
||||
OldRowID: oldrowid,
|
||||
NewRowID: newrowid,
|
||||
}
|
||||
callback := hval.val.(func(SQLitePreUpdateData))
|
||||
callback(data)
|
||||
}
|
||||
|
||||
// Use handles to avoid passing Go pointers to C.
|
||||
type handleVal struct {
|
||||
db *SQLiteConn
|
||||
val interface{}
|
||||
}
|
||||
|
||||
var handleLock sync.Mutex
|
||||
var handleVals = make(map[unsafe.Pointer]handleVal)
|
||||
|
||||
func newHandle(db *SQLiteConn, v interface{}) unsafe.Pointer {
|
||||
handleLock.Lock()
|
||||
defer handleLock.Unlock()
|
||||
val := handleVal{db: db, val: v}
|
||||
var p unsafe.Pointer = C.malloc(C.size_t(1))
|
||||
if p == nil {
|
||||
panic("can't allocate 'cgo-pointer hack index pointer': ptr == nil")
|
||||
}
|
||||
handleVals[p] = val
|
||||
return p
|
||||
}
|
||||
|
||||
func lookupHandleVal(handle unsafe.Pointer) handleVal {
|
||||
handleLock.Lock()
|
||||
defer handleLock.Unlock()
|
||||
return handleVals[handle]
|
||||
}
|
||||
|
||||
func lookupHandle(handle unsafe.Pointer) interface{} {
|
||||
return lookupHandleVal(handle).val
|
||||
}
|
||||
|
||||
func deleteHandles(db *SQLiteConn) {
|
||||
handleLock.Lock()
|
||||
defer handleLock.Unlock()
|
||||
for handle, val := range handleVals {
|
||||
if val.db == db {
|
||||
delete(handleVals, handle)
|
||||
C.free(handle)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This is only here so that tests can refer to it.
|
||||
type callbackArgRaw C.sqlite3_value
|
||||
|
||||
type callbackArgConverter func(*C.sqlite3_value) (reflect.Value, error)
|
||||
|
||||
type callbackArgCast struct {
|
||||
f callbackArgConverter
|
||||
typ reflect.Type
|
||||
}
|
||||
|
||||
func (c callbackArgCast) Run(v *C.sqlite3_value) (reflect.Value, error) {
|
||||
val, err := c.f(v)
|
||||
if err != nil {
|
||||
return reflect.Value{}, err
|
||||
}
|
||||
if !val.Type().ConvertibleTo(c.typ) {
|
||||
return reflect.Value{}, fmt.Errorf("cannot convert %s to %s", val.Type(), c.typ)
|
||||
}
|
||||
return val.Convert(c.typ), nil
|
||||
}
|
||||
|
||||
func callbackArgInt64(v *C.sqlite3_value) (reflect.Value, error) {
|
||||
if C.sqlite3_value_type(v) != C.SQLITE_INTEGER {
|
||||
return reflect.Value{}, fmt.Errorf("argument must be an INTEGER")
|
||||
}
|
||||
return reflect.ValueOf(int64(C.sqlite3_value_int64(v))), nil
|
||||
}
|
||||
|
||||
func callbackArgBool(v *C.sqlite3_value) (reflect.Value, error) {
|
||||
if C.sqlite3_value_type(v) != C.SQLITE_INTEGER {
|
||||
return reflect.Value{}, fmt.Errorf("argument must be an INTEGER")
|
||||
}
|
||||
i := int64(C.sqlite3_value_int64(v))
|
||||
val := false
|
||||
if i != 0 {
|
||||
val = true
|
||||
}
|
||||
return reflect.ValueOf(val), nil
|
||||
}
|
||||
|
||||
func callbackArgFloat64(v *C.sqlite3_value) (reflect.Value, error) {
|
||||
if C.sqlite3_value_type(v) != C.SQLITE_FLOAT {
|
||||
return reflect.Value{}, fmt.Errorf("argument must be a FLOAT")
|
||||
}
|
||||
return reflect.ValueOf(float64(C.sqlite3_value_double(v))), nil
|
||||
}
|
||||
|
||||
func callbackArgBytes(v *C.sqlite3_value) (reflect.Value, error) {
|
||||
switch C.sqlite3_value_type(v) {
|
||||
case C.SQLITE_BLOB:
|
||||
l := C.sqlite3_value_bytes(v)
|
||||
p := C.sqlite3_value_blob(v)
|
||||
return reflect.ValueOf(C.GoBytes(p, l)), nil
|
||||
case C.SQLITE_TEXT:
|
||||
l := C.sqlite3_value_bytes(v)
|
||||
c := unsafe.Pointer(C.sqlite3_value_text(v))
|
||||
return reflect.ValueOf(C.GoBytes(c, l)), nil
|
||||
default:
|
||||
return reflect.Value{}, fmt.Errorf("argument must be BLOB or TEXT")
|
||||
}
|
||||
}
|
||||
|
||||
func callbackArgString(v *C.sqlite3_value) (reflect.Value, error) {
|
||||
switch C.sqlite3_value_type(v) {
|
||||
case C.SQLITE_BLOB:
|
||||
l := C.sqlite3_value_bytes(v)
|
||||
p := (*C.char)(C.sqlite3_value_blob(v))
|
||||
return reflect.ValueOf(C.GoStringN(p, l)), nil
|
||||
case C.SQLITE_TEXT:
|
||||
c := (*C.char)(unsafe.Pointer(C.sqlite3_value_text(v)))
|
||||
return reflect.ValueOf(C.GoString(c)), nil
|
||||
default:
|
||||
return reflect.Value{}, fmt.Errorf("argument must be BLOB or TEXT")
|
||||
}
|
||||
}
|
||||
|
||||
func callbackArgGeneric(v *C.sqlite3_value) (reflect.Value, error) {
|
||||
switch C.sqlite3_value_type(v) {
|
||||
case C.SQLITE_INTEGER:
|
||||
return callbackArgInt64(v)
|
||||
case C.SQLITE_FLOAT:
|
||||
return callbackArgFloat64(v)
|
||||
case C.SQLITE_TEXT:
|
||||
return callbackArgString(v)
|
||||
case C.SQLITE_BLOB:
|
||||
return callbackArgBytes(v)
|
||||
case C.SQLITE_NULL:
|
||||
// Interpret NULL as a nil byte slice.
|
||||
var ret []byte
|
||||
return reflect.ValueOf(ret), nil
|
||||
default:
|
||||
panic("unreachable")
|
||||
}
|
||||
}
|
||||
|
||||
func callbackArg(typ reflect.Type) (callbackArgConverter, error) {
|
||||
switch typ.Kind() {
|
||||
case reflect.Interface:
|
||||
if typ.NumMethod() != 0 {
|
||||
return nil, errors.New("the only supported interface type is interface{}")
|
||||
}
|
||||
return callbackArgGeneric, nil
|
||||
case reflect.Slice:
|
||||
if typ.Elem().Kind() != reflect.Uint8 {
|
||||
return nil, errors.New("the only supported slice type is []byte")
|
||||
}
|
||||
return callbackArgBytes, nil
|
||||
case reflect.String:
|
||||
return callbackArgString, nil
|
||||
case reflect.Bool:
|
||||
return callbackArgBool, nil
|
||||
case reflect.Int64:
|
||||
return callbackArgInt64, nil
|
||||
case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Int, reflect.Uint:
|
||||
c := callbackArgCast{callbackArgInt64, typ}
|
||||
return c.Run, nil
|
||||
case reflect.Float64:
|
||||
return callbackArgFloat64, nil
|
||||
case reflect.Float32:
|
||||
c := callbackArgCast{callbackArgFloat64, typ}
|
||||
return c.Run, nil
|
||||
default:
|
||||
return nil, fmt.Errorf("don't know how to convert to %s", typ)
|
||||
}
|
||||
}
|
||||
|
||||
func callbackConvertArgs(argv []*C.sqlite3_value, converters []callbackArgConverter, variadic callbackArgConverter) ([]reflect.Value, error) {
|
||||
var args []reflect.Value
|
||||
|
||||
if len(argv) < len(converters) {
|
||||
return nil, fmt.Errorf("function requires at least %d arguments", len(converters))
|
||||
}
|
||||
|
||||
for i, arg := range argv[:len(converters)] {
|
||||
v, err := converters[i](arg)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
if variadic != nil {
|
||||
for _, arg := range argv[len(converters):] {
|
||||
v, err := variadic(arg)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
args = append(args, v)
|
||||
}
|
||||
}
|
||||
return args, nil
|
||||
}
|
||||
|
||||
type callbackRetConverter func(*C.sqlite3_context, reflect.Value) error
|
||||
|
||||
func callbackRetInteger(ctx *C.sqlite3_context, v reflect.Value) error {
|
||||
switch v.Type().Kind() {
|
||||
case reflect.Int64:
|
||||
case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Int, reflect.Uint:
|
||||
v = v.Convert(reflect.TypeOf(int64(0)))
|
||||
case reflect.Bool:
|
||||
b := v.Interface().(bool)
|
||||
if b {
|
||||
v = reflect.ValueOf(int64(1))
|
||||
} else {
|
||||
v = reflect.ValueOf(int64(0))
|
||||
}
|
||||
default:
|
||||
return fmt.Errorf("cannot convert %s to INTEGER", v.Type())
|
||||
}
|
||||
|
||||
C.sqlite3_result_int64(ctx, C.sqlite3_int64(v.Interface().(int64)))
|
||||
return nil
|
||||
}
|
||||
|
||||
func callbackRetFloat(ctx *C.sqlite3_context, v reflect.Value) error {
|
||||
switch v.Type().Kind() {
|
||||
case reflect.Float64:
|
||||
case reflect.Float32:
|
||||
v = v.Convert(reflect.TypeOf(float64(0)))
|
||||
default:
|
||||
return fmt.Errorf("cannot convert %s to FLOAT", v.Type())
|
||||
}
|
||||
|
||||
C.sqlite3_result_double(ctx, C.double(v.Interface().(float64)))
|
||||
return nil
|
||||
}
|
||||
|
||||
func callbackRetBlob(ctx *C.sqlite3_context, v reflect.Value) error {
|
||||
if v.Type().Kind() != reflect.Slice || v.Type().Elem().Kind() != reflect.Uint8 {
|
||||
return fmt.Errorf("cannot convert %s to BLOB", v.Type())
|
||||
}
|
||||
i := v.Interface()
|
||||
if i == nil || len(i.([]byte)) == 0 {
|
||||
C.sqlite3_result_null(ctx)
|
||||
} else {
|
||||
bs := i.([]byte)
|
||||
C._sqlite3_result_blob(ctx, unsafe.Pointer(&bs[0]), C.int(len(bs)))
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func callbackRetText(ctx *C.sqlite3_context, v reflect.Value) error {
|
||||
if v.Type().Kind() != reflect.String {
|
||||
return fmt.Errorf("cannot convert %s to TEXT", v.Type())
|
||||
}
|
||||
C._sqlite3_result_text(ctx, C.CString(v.Interface().(string)))
|
||||
return nil
|
||||
}
|
||||
|
||||
func callbackRetNil(ctx *C.sqlite3_context, v reflect.Value) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func callbackRet(typ reflect.Type) (callbackRetConverter, error) {
|
||||
switch typ.Kind() {
|
||||
case reflect.Interface:
|
||||
errorInterface := reflect.TypeOf((*error)(nil)).Elem()
|
||||
if typ.Implements(errorInterface) {
|
||||
return callbackRetNil, nil
|
||||
}
|
||||
fallthrough
|
||||
case reflect.Slice:
|
||||
if typ.Elem().Kind() != reflect.Uint8 {
|
||||
return nil, errors.New("the only supported slice type is []byte")
|
||||
}
|
||||
return callbackRetBlob, nil
|
||||
case reflect.String:
|
||||
return callbackRetText, nil
|
||||
case reflect.Bool, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Int, reflect.Uint:
|
||||
return callbackRetInteger, nil
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return callbackRetFloat, nil
|
||||
default:
|
||||
return nil, fmt.Errorf("don't know how to convert to %s", typ)
|
||||
}
|
||||
}
|
||||
|
||||
func callbackError(ctx *C.sqlite3_context, err error) {
|
||||
cstr := C.CString(err.Error())
|
||||
defer C.free(unsafe.Pointer(cstr))
|
||||
C.sqlite3_result_error(ctx, cstr, C.int(-1))
|
||||
}
|
||||
|
||||
// Test support code. Tests are not allowed to import "C", so we can't
|
||||
// declare any functions that use C.sqlite3_value.
|
||||
func callbackSyntheticForTests(v reflect.Value, err error) callbackArgConverter {
|
||||
return func(*C.sqlite3_value) (reflect.Value, error) {
|
||||
return v, err
|
||||
}
|
||||
}
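
These trampolines are the internal plumbing behind go-sqlite3's custom SQL functions; the public entry point that feeds them is `(*SQLiteConn).RegisterFunc`. A hedged sketch of registering a pure Go function as a SQL scalar (driver and function names are illustrative):

```go
package main

import (
	"database/sql"
	"fmt"
	"log"
	"strings"

	sqlite3 "github.com/mattn/go-sqlite3"
)

func main() {
	// Expose strings.ToUpper to SQL as a deterministic ("pure") scalar function.
	sql.Register("sqlite3_custom", &sqlite3.SQLiteDriver{
		ConnectHook: func(conn *sqlite3.SQLiteConn) error {
			return conn.RegisterFunc("upper_go", strings.ToUpper, true)
		},
	})

	db, err := sql.Open("sqlite3_custom", ":memory:")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	var s string
	if err := db.QueryRow(`SELECT upper_go('hello')`).Scan(&s); err != nil {
		log.Fatal(err)
	}
	fmt.Println(s) // HELLO
}
```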
|
|
@@ -0,0 +1,299 @@
|
|||
// Extracted from Go database/sql source code
|
||||
|
||||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Type conversions for Scan.
|
||||
|
||||
package sqlite3
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"database/sql/driver"
|
||||
"errors"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
var errNilPtr = errors.New("destination pointer is nil") // embedded in descriptive error
|
||||
|
||||
// convertAssign copies to dest the value in src, converting it if possible.
|
||||
// An error is returned if the copy would result in loss of information.
|
||||
// dest should be a pointer type.
|
||||
func convertAssign(dest, src interface{}) error {
|
||||
// Common cases, without reflect.
|
||||
switch s := src.(type) {
|
||||
case string:
|
||||
switch d := dest.(type) {
|
||||
case *string:
|
||||
if d == nil {
|
||||
return errNilPtr
|
||||
}
|
||||
*d = s
|
||||
return nil
|
||||
case *[]byte:
|
||||
if d == nil {
|
||||
return errNilPtr
|
||||
}
|
||||
*d = []byte(s)
|
||||
return nil
|
||||
case *sql.RawBytes:
|
||||
if d == nil {
|
||||
return errNilPtr
|
||||
}
|
||||
*d = append((*d)[:0], s...)
|
||||
return nil
|
||||
}
|
||||
case []byte:
|
||||
switch d := dest.(type) {
|
||||
case *string:
|
||||
if d == nil {
|
||||
return errNilPtr
|
||||
}
|
||||
*d = string(s)
|
||||
return nil
|
||||
case *interface{}:
|
||||
if d == nil {
|
||||
return errNilPtr
|
||||
}
|
||||
*d = cloneBytes(s)
|
||||
return nil
|
||||
case *[]byte:
|
||||
if d == nil {
|
||||
return errNilPtr
|
||||
}
|
||||
*d = cloneBytes(s)
|
||||
return nil
|
||||
case *sql.RawBytes:
|
||||
if d == nil {
|
||||
return errNilPtr
|
||||
}
|
||||
*d = s
|
||||
return nil
|
||||
}
|
||||
case time.Time:
|
||||
switch d := dest.(type) {
|
||||
case *time.Time:
|
||||
*d = s
|
||||
return nil
|
||||
case *string:
|
||||
*d = s.Format(time.RFC3339Nano)
|
||||
return nil
|
||||
case *[]byte:
|
||||
if d == nil {
|
||||
return errNilPtr
|
||||
}
|
||||
*d = []byte(s.Format(time.RFC3339Nano))
|
||||
return nil
|
||||
case *sql.RawBytes:
|
||||
if d == nil {
|
||||
return errNilPtr
|
||||
}
|
||||
*d = s.AppendFormat((*d)[:0], time.RFC3339Nano)
|
||||
return nil
|
||||
}
|
||||
case nil:
|
||||
switch d := dest.(type) {
|
||||
case *interface{}:
|
||||
if d == nil {
|
||||
return errNilPtr
|
||||
}
|
||||
*d = nil
|
||||
return nil
|
||||
case *[]byte:
|
||||
if d == nil {
|
||||
return errNilPtr
|
||||
}
|
||||
*d = nil
|
||||
return nil
|
||||
case *sql.RawBytes:
|
||||
if d == nil {
|
||||
return errNilPtr
|
||||
}
|
||||
*d = nil
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
var sv reflect.Value
|
||||
|
||||
switch d := dest.(type) {
|
||||
case *string:
|
||||
sv = reflect.ValueOf(src)
|
||||
switch sv.Kind() {
|
||||
case reflect.Bool,
|
||||
reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
|
||||
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
|
||||
reflect.Float32, reflect.Float64:
|
||||
*d = asString(src)
|
||||
return nil
|
||||
}
|
||||
case *[]byte:
|
||||
sv = reflect.ValueOf(src)
|
||||
if b, ok := asBytes(nil, sv); ok {
|
||||
*d = b
|
||||
return nil
|
||||
}
|
||||
case *sql.RawBytes:
|
||||
sv = reflect.ValueOf(src)
|
||||
if b, ok := asBytes([]byte(*d)[:0], sv); ok {
|
||||
*d = sql.RawBytes(b)
|
||||
return nil
|
||||
}
|
||||
case *bool:
|
||||
bv, err := driver.Bool.ConvertValue(src)
|
||||
if err == nil {
|
||||
*d = bv.(bool)
|
||||
}
|
||||
return err
|
||||
case *interface{}:
|
||||
*d = src
|
||||
return nil
|
||||
}
|
||||
|
||||
if scanner, ok := dest.(sql.Scanner); ok {
|
||||
return scanner.Scan(src)
|
||||
}
|
||||
|
||||
dpv := reflect.ValueOf(dest)
|
||||
if dpv.Kind() != reflect.Ptr {
|
||||
return errors.New("destination not a pointer")
|
||||
}
|
||||
if dpv.IsNil() {
|
||||
return errNilPtr
|
||||
}
|
||||
|
||||
if !sv.IsValid() {
|
||||
sv = reflect.ValueOf(src)
|
||||
}
|
||||
|
||||
dv := reflect.Indirect(dpv)
|
||||
if sv.IsValid() && sv.Type().AssignableTo(dv.Type()) {
|
||||
switch b := src.(type) {
|
||||
case []byte:
|
||||
dv.Set(reflect.ValueOf(cloneBytes(b)))
|
||||
default:
|
||||
dv.Set(sv)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
if dv.Kind() == sv.Kind() && sv.Type().ConvertibleTo(dv.Type()) {
|
||||
dv.Set(sv.Convert(dv.Type()))
|
||||
return nil
|
||||
}
|
||||
|
||||
// The following conversions use a string value as an intermediate representation
|
||||
// to convert between various numeric types.
|
||||
//
|
||||
// This also allows scanning into user defined types such as "type Int int64".
|
||||
// For symmetry, also check for string destination types.
|
||||
switch dv.Kind() {
|
||||
case reflect.Ptr:
|
||||
if src == nil {
|
||||
dv.Set(reflect.Zero(dv.Type()))
|
||||
return nil
|
||||
}
|
||||
dv.Set(reflect.New(dv.Type().Elem()))
|
||||
return convertAssign(dv.Interface(), src)
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
s := asString(src)
|
||||
i64, err := strconv.ParseInt(s, 10, dv.Type().Bits())
|
||||
if err != nil {
|
||||
err = strconvErr(err)
|
||||
return fmt.Errorf("converting driver.Value type %T (%q) to a %s: %v", src, s, dv.Kind(), err)
|
||||
}
|
||||
dv.SetInt(i64)
|
||||
return nil
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||
s := asString(src)
|
||||
u64, err := strconv.ParseUint(s, 10, dv.Type().Bits())
|
||||
if err != nil {
|
||||
err = strconvErr(err)
|
||||
return fmt.Errorf("converting driver.Value type %T (%q) to a %s: %v", src, s, dv.Kind(), err)
|
||||
}
|
||||
dv.SetUint(u64)
|
||||
return nil
|
||||
case reflect.Float32, reflect.Float64:
|
||||
s := asString(src)
|
||||
f64, err := strconv.ParseFloat(s, dv.Type().Bits())
|
||||
if err != nil {
|
||||
err = strconvErr(err)
|
||||
return fmt.Errorf("converting driver.Value type %T (%q) to a %s: %v", src, s, dv.Kind(), err)
|
||||
}
|
||||
dv.SetFloat(f64)
|
||||
return nil
|
||||
case reflect.String:
|
||||
switch v := src.(type) {
|
||||
case string:
|
||||
dv.SetString(v)
|
||||
return nil
|
||||
case []byte:
|
||||
dv.SetString(string(v))
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
return fmt.Errorf("unsupported Scan, storing driver.Value type %T into type %T", src, dest)
|
||||
}
|
||||
|
||||
func strconvErr(err error) error {
|
||||
if ne, ok := err.(*strconv.NumError); ok {
|
||||
return ne.Err
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func cloneBytes(b []byte) []byte {
|
||||
if b == nil {
|
||||
return nil
|
||||
}
|
||||
c := make([]byte, len(b))
|
||||
copy(c, b)
|
||||
return c
|
||||
}
|
||||
|
||||
func asString(src interface{}) string {
|
||||
switch v := src.(type) {
|
||||
case string:
|
||||
return v
|
||||
case []byte:
|
||||
return string(v)
|
||||
}
|
||||
rv := reflect.ValueOf(src)
|
||||
switch rv.Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return strconv.FormatInt(rv.Int(), 10)
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||
return strconv.FormatUint(rv.Uint(), 10)
|
||||
case reflect.Float64:
|
||||
return strconv.FormatFloat(rv.Float(), 'g', -1, 64)
|
||||
case reflect.Float32:
|
||||
return strconv.FormatFloat(rv.Float(), 'g', -1, 32)
|
||||
case reflect.Bool:
|
||||
return strconv.FormatBool(rv.Bool())
|
||||
}
|
||||
return fmt.Sprintf("%v", src)
|
||||
}
|
||||
|
||||
func asBytes(buf []byte, rv reflect.Value) (b []byte, ok bool) {
|
||||
switch rv.Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return strconv.AppendInt(buf, rv.Int(), 10), true
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||
return strconv.AppendUint(buf, rv.Uint(), 10), true
|
||||
case reflect.Float32:
|
||||
return strconv.AppendFloat(buf, rv.Float(), 'g', -1, 32), true
|
||||
case reflect.Float64:
|
||||
return strconv.AppendFloat(buf, rv.Float(), 'g', -1, 64), true
|
||||
case reflect.Bool:
|
||||
return strconv.AppendBool(buf, rv.Bool()), true
|
||||
case reflect.String:
|
||||
s := rv.String()
|
||||
return append(buf, s...), true
|
||||
}
|
||||
return
|
||||
}
|
|
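As the comment inside convertAssign notes, the string intermediate also allows scanning into user-defined types. A hedged sketch (table and column names are hypothetical, db is an already opened *sql.DB, log is imported):

	type Age int64 // user-defined integer type

	var a Age
	// The INTEGER column value is converted via asString/ParseInt inside convertAssign.
	if err := db.QueryRow("SELECT age FROM people WHERE id = ?", 1).Scan(&a); err != nil {
		log.Fatal(err)
	}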
@ -0,0 +1,135 @@
|
|||
/*
|
||||
Package sqlite3 provides an interface to SQLite3 databases.
|
||||
|
||||
This works as a driver for database/sql.
|
||||
|
||||
Installation
|
||||
|
||||
go get github.com/mattn/go-sqlite3
|
||||
|
||||
Supported Types
|
||||
|
||||
Currently, go-sqlite3 supports the following data types.
|
||||
|
||||
+------------------------------+
|
||||
|go | sqlite3 |
|
||||
|----------|-------------------|
|
||||
|nil | null |
|
||||
|int | integer |
|
||||
|int64 | integer |
|
||||
|float64 | float |
|
||||
|bool | integer |
|
||||
|[]byte | blob |
|
||||
|string | text |
|
||||
|time.Time | timestamp/datetime|
|
||||
+------------------------------+
|
||||
|
||||
SQLite3 Extension
|
||||
|
||||
You can write your own extension module for sqlite3. For example, below is an
|
||||
extension for a Regexp matcher operation.
|
||||
|
||||
#include <pcre.h>
|
||||
#include <string.h>
|
||||
#include <stdio.h>
|
||||
#include <sqlite3ext.h>
|
||||
|
||||
SQLITE_EXTENSION_INIT1
|
||||
static void regexp_func(sqlite3_context *context, int argc, sqlite3_value **argv) {
|
||||
if (argc >= 2) {
|
||||
const char *target = (const char *)sqlite3_value_text(argv[1]);
|
||||
const char *pattern = (const char *)sqlite3_value_text(argv[0]);
|
||||
const char* errstr = NULL;
|
||||
int erroff = 0;
|
||||
int vec[500];
|
||||
int n, rc;
|
||||
pcre* re = pcre_compile(pattern, 0, &errstr, &erroff, NULL);
|
||||
rc = pcre_exec(re, NULL, target, strlen(target), 0, 0, vec, 500);
|
||||
if (rc <= 0) {
|
||||
sqlite3_result_error(context, errstr, 0);
|
||||
return;
|
||||
}
|
||||
sqlite3_result_int(context, 1);
|
||||
}
|
||||
}
|
||||
|
||||
#ifdef _WIN32
|
||||
__declspec(dllexport)
|
||||
#endif
|
||||
int sqlite3_extension_init(sqlite3 *db, char **errmsg,
|
||||
const sqlite3_api_routines *api) {
|
||||
SQLITE_EXTENSION_INIT2(api);
|
||||
return sqlite3_create_function(db, "regexp", 2, SQLITE_UTF8,
|
||||
(void*)db, regexp_func, NULL, NULL);
|
||||
}
|
||||
|
||||
It needs to be built as a shared library (.so / .dll), and you need to register
the extension module like below.
|
||||
|
||||
sql.Register("sqlite3_with_extensions",
|
||||
&sqlite3.SQLiteDriver{
|
||||
Extensions: []string{
|
||||
"sqlite3_mod_regexp",
|
||||
},
|
||||
})
|
||||
|
||||
Then, you can use this extension.
|
||||
|
||||
rows, err := db.Query("select text from mytable where name regexp '^golang'")
|
||||
|
||||
Connection Hook
|
||||
|
||||
You can hook and inject your code when the connection is established by setting
|
||||
ConnectHook to get the SQLiteConn.
|
||||
|
||||
sql.Register("sqlite3_with_hook_example",
|
||||
&sqlite3.SQLiteDriver{
|
||||
ConnectHook: func(conn *sqlite3.SQLiteConn) error {
|
||||
sqlite3conn = append(sqlite3conn, conn)
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
You can also use database/sql.Conn.Raw (Go >= 1.13):
|
||||
|
||||
conn, err := db.Conn(context.Background())
|
||||
// if err != nil { ... }
|
||||
defer conn.Close()
|
||||
err = conn.Raw(func (driverConn interface{}) error {
|
||||
sqliteConn := driverConn.(*sqlite3.SQLiteConn)
|
||||
// ... use sqliteConn
|
||||
})
|
||||
// if err != nil { ... }
|
||||
|
||||
Go SQLite3 Extensions
|
||||
|
||||
If you want to register Go functions as SQLite extension functions
|
||||
you can make a custom driver by calling RegisterFunc from
|
||||
ConnectHook.
|
||||
|
||||
regex = func(re, s string) (bool, error) {
|
||||
return regexp.MatchString(re, s)
|
||||
}
|
||||
sql.Register("sqlite3_extended",
|
||||
&sqlite3.SQLiteDriver{
|
||||
ConnectHook: func(conn *sqlite3.SQLiteConn) error {
|
||||
return conn.RegisterFunc("regexp", regex, true)
|
||||
},
|
||||
})
|
||||
|
||||
You can then use the custom driver by passing its name to sql.Open.
|
||||
|
||||
var i int
|
||||
db, err := sql.Open("sqlite3_extended", "./foo.db")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
err = db.QueryRow(`SELECT regexp("foo.*", "seafood")`).Scan(&i)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
See the documentation of RegisterFunc for more details.
|
||||
|
||||
*/
|
||||
package sqlite3
|
|
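A minimal usage sketch tying the supported-type table above to database/sql (file name and schema are illustrative, not part of the library; assumes database/sql, log and time are imported and the driver is registered via the blank import):

	db, err := sql.Open("sqlite3", "./example.db")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// TEXT and TIMESTAMP columns map to string and time.Time per the table above.
	if _, err := db.Exec(`CREATE TABLE IF NOT EXISTS notes (id INTEGER PRIMARY KEY, body TEXT, created_at TIMESTAMP)`); err != nil {
		log.Fatal(err)
	}
	if _, err := db.Exec(`INSERT INTO notes (body, created_at) VALUES (?, ?)`, "hello", time.Now()); err != nil {
		log.Fatal(err)
	}

	var body string
	var created time.Time
	if err := db.QueryRow(`SELECT body, created_at FROM notes ORDER BY id DESC LIMIT 1`).Scan(&body, &created); err != nil {
		log.Fatal(err)
	}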
@ -0,0 +1,150 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#ifndef USE_LIBSQLITE3
|
||||
#include "sqlite3-binding.h"
|
||||
#else
|
||||
#include <sqlite3.h>
|
||||
#endif
|
||||
*/
|
||||
import "C"
|
||||
import "syscall"
|
||||
|
||||
// ErrNo inherits errno.
|
||||
type ErrNo int
|
||||
|
||||
// ErrNoMask is mask code.
|
||||
const ErrNoMask C.int = 0xff
|
||||
|
||||
// ErrNoExtended is extended errno.
|
||||
type ErrNoExtended int
|
||||
|
||||
// Error implements the sqlite error code.
|
||||
type Error struct {
|
||||
Code ErrNo /* The error code returned by SQLite */
|
||||
ExtendedCode ErrNoExtended /* The extended error code returned by SQLite */
|
||||
SystemErrno syscall.Errno /* The system errno returned by the OS through SQLite, if applicable */
|
||||
err string /* The error string returned by sqlite3_errmsg(),
|
||||
this usually contains more specific details. */
|
||||
}
|
||||
|
||||
// result codes from http://www.sqlite.org/c3ref/c_abort.html
|
||||
var (
|
||||
ErrError = ErrNo(1) /* SQL error or missing database */
|
||||
ErrInternal = ErrNo(2) /* Internal logic error in SQLite */
|
||||
ErrPerm = ErrNo(3) /* Access permission denied */
|
||||
ErrAbort = ErrNo(4) /* Callback routine requested an abort */
|
||||
ErrBusy = ErrNo(5) /* The database file is locked */
|
||||
ErrLocked = ErrNo(6) /* A table in the database is locked */
|
||||
ErrNomem = ErrNo(7) /* A malloc() failed */
|
||||
ErrReadonly = ErrNo(8) /* Attempt to write a readonly database */
|
||||
ErrInterrupt = ErrNo(9) /* Operation terminated by sqlite3_interrupt() */
|
||||
ErrIoErr = ErrNo(10) /* Some kind of disk I/O error occurred */
|
||||
ErrCorrupt = ErrNo(11) /* The database disk image is malformed */
|
||||
ErrNotFound = ErrNo(12) /* Unknown opcode in sqlite3_file_control() */
|
||||
ErrFull = ErrNo(13) /* Insertion failed because database is full */
|
||||
ErrCantOpen = ErrNo(14) /* Unable to open the database file */
|
||||
ErrProtocol = ErrNo(15) /* Database lock protocol error */
|
||||
ErrEmpty = ErrNo(16) /* Database is empty */
|
||||
ErrSchema = ErrNo(17) /* The database schema changed */
|
||||
ErrTooBig = ErrNo(18) /* String or BLOB exceeds size limit */
|
||||
ErrConstraint = ErrNo(19) /* Abort due to constraint violation */
|
||||
ErrMismatch = ErrNo(20) /* Data type mismatch */
|
||||
ErrMisuse = ErrNo(21) /* Library used incorrectly */
|
||||
ErrNoLFS = ErrNo(22) /* Uses OS features not supported on host */
|
||||
ErrAuth = ErrNo(23) /* Authorization denied */
|
||||
ErrFormat = ErrNo(24) /* Auxiliary database format error */
|
||||
ErrRange = ErrNo(25) /* 2nd parameter to sqlite3_bind out of range */
|
||||
ErrNotADB = ErrNo(26) /* File opened that is not a database file */
|
||||
ErrNotice = ErrNo(27) /* Notifications from sqlite3_log() */
|
||||
ErrWarning = ErrNo(28) /* Warnings from sqlite3_log() */
|
||||
)
|
||||
|
||||
// Error returns the error message for the errno.
|
||||
func (err ErrNo) Error() string {
|
||||
return Error{Code: err}.Error()
|
||||
}
|
||||
|
||||
// Extend returns the extended errno.
|
||||
func (err ErrNo) Extend(by int) ErrNoExtended {
|
||||
return ErrNoExtended(int(err) | (by << 8))
|
||||
}
|
||||
|
||||
// Error returns the error message for the extended error code.
|
||||
func (err ErrNoExtended) Error() string {
|
||||
return Error{Code: ErrNo(C.int(err) & ErrNoMask), ExtendedCode: err}.Error()
|
||||
}
|
||||
|
||||
func (err Error) Error() string {
|
||||
var str string
|
||||
if err.err != "" {
|
||||
str = err.err
|
||||
} else {
|
||||
str = C.GoString(C.sqlite3_errstr(C.int(err.Code)))
|
||||
}
|
||||
if err.SystemErrno != 0 {
|
||||
str += ": " + err.SystemErrno.Error()
|
||||
}
|
||||
return str
|
||||
}
|
||||
|
||||
// result codes from http://www.sqlite.org/c3ref/c_abort_rollback.html
|
||||
var (
|
||||
ErrIoErrRead = ErrIoErr.Extend(1)
|
||||
ErrIoErrShortRead = ErrIoErr.Extend(2)
|
||||
ErrIoErrWrite = ErrIoErr.Extend(3)
|
||||
ErrIoErrFsync = ErrIoErr.Extend(4)
|
||||
ErrIoErrDirFsync = ErrIoErr.Extend(5)
|
||||
ErrIoErrTruncate = ErrIoErr.Extend(6)
|
||||
ErrIoErrFstat = ErrIoErr.Extend(7)
|
||||
ErrIoErrUnlock = ErrIoErr.Extend(8)
|
||||
ErrIoErrRDlock = ErrIoErr.Extend(9)
|
||||
ErrIoErrDelete = ErrIoErr.Extend(10)
|
||||
ErrIoErrBlocked = ErrIoErr.Extend(11)
|
||||
ErrIoErrNoMem = ErrIoErr.Extend(12)
|
||||
ErrIoErrAccess = ErrIoErr.Extend(13)
|
||||
ErrIoErrCheckReservedLock = ErrIoErr.Extend(14)
|
||||
ErrIoErrLock = ErrIoErr.Extend(15)
|
||||
ErrIoErrClose = ErrIoErr.Extend(16)
|
||||
ErrIoErrDirClose = ErrIoErr.Extend(17)
|
||||
ErrIoErrSHMOpen = ErrIoErr.Extend(18)
|
||||
ErrIoErrSHMSize = ErrIoErr.Extend(19)
|
||||
ErrIoErrSHMLock = ErrIoErr.Extend(20)
|
||||
ErrIoErrSHMMap = ErrIoErr.Extend(21)
|
||||
ErrIoErrSeek = ErrIoErr.Extend(22)
|
||||
ErrIoErrDeleteNoent = ErrIoErr.Extend(23)
|
||||
ErrIoErrMMap = ErrIoErr.Extend(24)
|
||||
ErrIoErrGetTempPath = ErrIoErr.Extend(25)
|
||||
ErrIoErrConvPath = ErrIoErr.Extend(26)
|
||||
ErrLockedSharedCache = ErrLocked.Extend(1)
|
||||
ErrBusyRecovery = ErrBusy.Extend(1)
|
||||
ErrBusySnapshot = ErrBusy.Extend(2)
|
||||
ErrCantOpenNoTempDir = ErrCantOpen.Extend(1)
|
||||
ErrCantOpenIsDir = ErrCantOpen.Extend(2)
|
||||
ErrCantOpenFullPath = ErrCantOpen.Extend(3)
|
||||
ErrCantOpenConvPath = ErrCantOpen.Extend(4)
|
||||
ErrCorruptVTab = ErrCorrupt.Extend(1)
|
||||
ErrReadonlyRecovery = ErrReadonly.Extend(1)
|
||||
ErrReadonlyCantLock = ErrReadonly.Extend(2)
|
||||
ErrReadonlyRollback = ErrReadonly.Extend(3)
|
||||
ErrReadonlyDbMoved = ErrReadonly.Extend(4)
|
||||
ErrAbortRollback = ErrAbort.Extend(2)
|
||||
ErrConstraintCheck = ErrConstraint.Extend(1)
|
||||
ErrConstraintCommitHook = ErrConstraint.Extend(2)
|
||||
ErrConstraintForeignKey = ErrConstraint.Extend(3)
|
||||
ErrConstraintFunction = ErrConstraint.Extend(4)
|
||||
ErrConstraintNotNull = ErrConstraint.Extend(5)
|
||||
ErrConstraintPrimaryKey = ErrConstraint.Extend(6)
|
||||
ErrConstraintTrigger = ErrConstraint.Extend(7)
|
||||
ErrConstraintUnique = ErrConstraint.Extend(8)
|
||||
ErrConstraintVTab = ErrConstraint.Extend(9)
|
||||
ErrConstraintRowID = ErrConstraint.Extend(10)
|
||||
ErrNoticeRecoverWAL = ErrNotice.Extend(1)
|
||||
ErrNoticeRecoverRollback = ErrNotice.Extend(2)
|
||||
ErrWarningAutoIndex = ErrWarning.Extend(1)
|
||||
)
|
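The primary and extended codes above can be inspected from application code when a query fails. A hedged sketch (assumes err came back from a statement executed with this driver, and errors plus the sqlite3 package are imported):

	var sqliteErr sqlite3.Error
	if errors.As(err, &sqliteErr) {
		if sqliteErr.ExtendedCode == sqlite3.ErrConstraintUnique {
			// handle a duplicate-key insert specifically
		} else if sqliteErr.Code == sqlite3.ErrBusy {
			// database file is locked, retry later
		}
	}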
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
|
@ -0,0 +1,103 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
|
||||
#ifndef USE_LIBSQLITE3
|
||||
#include "sqlite3-binding.h"
|
||||
#else
|
||||
#include <sqlite3.h>
|
||||
#endif
|
||||
#include <stdlib.h>
|
||||
// These wrappers are necessary because SQLITE_TRANSIENT
|
||||
// is a pointer constant, and cgo doesn't translate them correctly.
|
||||
|
||||
static inline void my_result_text(sqlite3_context *ctx, char *p, int np) {
|
||||
sqlite3_result_text(ctx, p, np, SQLITE_TRANSIENT);
|
||||
}
|
||||
|
||||
static inline void my_result_blob(sqlite3_context *ctx, void *p, int np) {
|
||||
sqlite3_result_blob(ctx, p, np, SQLITE_TRANSIENT);
|
||||
}
|
||||
*/
|
||||
import "C"
|
||||
|
||||
import (
|
||||
"math"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
const i64 = unsafe.Sizeof(int(0)) > 4
|
||||
|
||||
// SQLiteContext behaves as an sqlite3_context.
|
||||
type SQLiteContext C.sqlite3_context
|
||||
|
||||
// ResultBool sets the result of an SQL function.
|
||||
func (c *SQLiteContext) ResultBool(b bool) {
|
||||
if b {
|
||||
c.ResultInt(1)
|
||||
} else {
|
||||
c.ResultInt(0)
|
||||
}
|
||||
}
|
||||
|
||||
// ResultBlob sets the result of an SQL function.
|
||||
// See: sqlite3_result_blob, http://sqlite.org/c3ref/result_blob.html
|
||||
func (c *SQLiteContext) ResultBlob(b []byte) {
|
||||
if i64 && len(b) > math.MaxInt32 {
|
||||
C.sqlite3_result_error_toobig((*C.sqlite3_context)(c))
|
||||
return
|
||||
}
|
||||
var p *byte
|
||||
if len(b) > 0 {
|
||||
p = &b[0]
|
||||
}
|
||||
C.my_result_blob((*C.sqlite3_context)(c), unsafe.Pointer(p), C.int(len(b)))
|
||||
}
|
||||
|
||||
// ResultDouble sets the result of an SQL function.
|
||||
// See: sqlite3_result_double, http://sqlite.org/c3ref/result_blob.html
|
||||
func (c *SQLiteContext) ResultDouble(d float64) {
|
||||
C.sqlite3_result_double((*C.sqlite3_context)(c), C.double(d))
|
||||
}
|
||||
|
||||
// ResultInt sets the result of an SQL function.
|
||||
// See: sqlite3_result_int, http://sqlite.org/c3ref/result_blob.html
|
||||
func (c *SQLiteContext) ResultInt(i int) {
|
||||
if i64 && (i > math.MaxInt32 || i < math.MinInt32) {
|
||||
C.sqlite3_result_int64((*C.sqlite3_context)(c), C.sqlite3_int64(i))
|
||||
} else {
|
||||
C.sqlite3_result_int((*C.sqlite3_context)(c), C.int(i))
|
||||
}
|
||||
}
|
||||
|
||||
// ResultInt64 sets the result of an SQL function.
|
||||
// See: sqlite3_result_int64, http://sqlite.org/c3ref/result_blob.html
|
||||
func (c *SQLiteContext) ResultInt64(i int64) {
|
||||
C.sqlite3_result_int64((*C.sqlite3_context)(c), C.sqlite3_int64(i))
|
||||
}
|
||||
|
||||
// ResultNull sets the result of an SQL function.
|
||||
// See: sqlite3_result_null, http://sqlite.org/c3ref/result_blob.html
|
||||
func (c *SQLiteContext) ResultNull() {
|
||||
C.sqlite3_result_null((*C.sqlite3_context)(c))
|
||||
}
|
||||
|
||||
// ResultText sets the result of an SQL function.
|
||||
// See: sqlite3_result_text, http://sqlite.org/c3ref/result_blob.html
|
||||
func (c *SQLiteContext) ResultText(s string) {
|
||||
h := (*reflect.StringHeader)(unsafe.Pointer(&s))
|
||||
cs, l := (*C.char)(unsafe.Pointer(h.Data)), C.int(h.Len)
|
||||
C.my_result_text((*C.sqlite3_context)(c), cs, l)
|
||||
}
|
||||
|
||||
// ResultZeroblob sets the result of an SQL function.
|
||||
// See: sqlite3_result_zeroblob, http://sqlite.org/c3ref/result_blob.html
|
||||
func (c *SQLiteContext) ResultZeroblob(n int) {
|
||||
C.sqlite3_result_zeroblob((*C.sqlite3_context)(c), C.int(n))
|
||||
}
|
|
@ -0,0 +1,120 @@
|
|||
// Copyright (C) 2018 G.J.R. Timmer <gjr.timmer@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package sqlite3
|
||||
|
||||
import (
|
||||
"crypto/sha1"
|
||||
"crypto/sha256"
|
||||
"crypto/sha512"
|
||||
)
|
||||
|
||||
// This file provides several different implementations for the
|
||||
// default embedded sqlite_crypt function.
|
||||
// This function uses a Caesar cipher by default
|
||||
// and is used within the UserAuthentication module to encode
|
||||
// the password.
|
||||
//
|
||||
// The provided functions can be used as an overload to the sqlite_crypt
|
||||
// function through the use of the RegisterFunc on the connection.
|
||||
//
|
||||
// Because the functions can serve a purpose to an end-user
// without using the UserAuthentication module,
// the functions are compiled in by default.
|
||||
//
|
||||
// From SQLITE3 - user-auth.txt
|
||||
// The sqlite_user.pw field is encoded by a built-in SQL function
|
||||
// "sqlite_crypt(X,Y)". The two arguments are both BLOBs. The first argument
|
||||
// is the plaintext password supplied to the sqlite3_user_authenticate()
|
||||
// interface. The second argument is the sqlite_user.pw value and is supplied
|
||||
// so that the function can extract the "salt" used by the password encoder.
|
||||
// The result of sqlite_crypt(X,Y) is another blob which is the value that
|
||||
// ends up being stored in sqlite_user.pw. To verify credentials X supplied
|
||||
// by the sqlite3_user_authenticate() routine, SQLite runs:
|
||||
//
|
||||
// sqlite_user.pw == sqlite_crypt(X, sqlite_user.pw)
|
||||
//
|
||||
// To compute an appropriate sqlite_user.pw value from a new or modified
|
||||
// password X, sqlite_crypt(X,NULL) is run. A new random salt is selected
|
||||
// when the second argument is NULL.
|
||||
//
|
||||
// The built-in version of sqlite_crypt() uses a simple Caesar cipher
|
||||
// which prevents passwords from being revealed by searching the raw database
|
||||
// for ASCII text, but is otherwise trivially broken. For better password
|
||||
// security, the database should be encrypted using the SQLite Encryption
|
||||
// Extension or similar technology. Or, the application can use the
|
||||
// sqlite3_create_function() interface to provide an alternative
|
||||
// implementation of sqlite_crypt() that computes a stronger password hash,
|
||||
// perhaps using a cryptographic hash function like SHA1.
|
||||
|
||||
// CryptEncoderSHA1 encodes a password with SHA1
|
||||
func CryptEncoderSHA1(pass []byte, hash interface{}) []byte {
|
||||
h := sha1.Sum(pass)
|
||||
return h[:]
|
||||
}
|
||||
|
||||
// CryptEncoderSSHA1 encodes a password with SHA1 with the
|
||||
// configured salt.
|
||||
func CryptEncoderSSHA1(salt string) func(pass []byte, hash interface{}) []byte {
|
||||
return func(pass []byte, hash interface{}) []byte {
|
||||
s := []byte(salt)
|
||||
p := append(pass, s...)
|
||||
h := sha1.Sum(p)
|
||||
return h[:]
|
||||
}
|
||||
}
|
||||
|
||||
// CryptEncoderSHA256 encodes a password with SHA256
|
||||
func CryptEncoderSHA256(pass []byte, hash interface{}) []byte {
|
||||
h := sha256.Sum256(pass)
|
||||
return h[:]
|
||||
}
|
||||
|
||||
// CryptEncoderSSHA256 encodes a password with SHA256
|
||||
// with the configured salt
|
||||
func CryptEncoderSSHA256(salt string) func(pass []byte, hash interface{}) []byte {
|
||||
return func(pass []byte, hash interface{}) []byte {
|
||||
s := []byte(salt)
|
||||
p := append(pass, s...)
|
||||
h := sha256.Sum256(p)
|
||||
return h[:]
|
||||
}
|
||||
}
|
||||
|
||||
// CryptEncoderSHA384 encodes a password with SHA384
|
||||
func CryptEncoderSHA384(pass []byte, hash interface{}) []byte {
|
||||
h := sha512.Sum384(pass)
|
||||
return h[:]
|
||||
}
|
||||
|
||||
// CryptEncoderSSHA384 encodes a password with SHA384
|
||||
// with the configured salt
|
||||
func CryptEncoderSSHA384(salt string) func(pass []byte, hash interface{}) []byte {
|
||||
return func(pass []byte, hash interface{}) []byte {
|
||||
s := []byte(salt)
|
||||
p := append(pass, s...)
|
||||
h := sha512.Sum384(p)
|
||||
return h[:]
|
||||
}
|
||||
}
|
||||
|
||||
// CryptEncoderSHA512 encodes a password with SHA512
|
||||
func CryptEncoderSHA512(pass []byte, hash interface{}) []byte {
|
||||
h := sha512.Sum512(pass)
|
||||
return h[:]
|
||||
}
|
||||
|
||||
// CryptEncoderSSHA512 encodes a password with SHA512
|
||||
// with the configured salt
|
||||
func CryptEncoderSSHA512(salt string) func(pass []byte, hash interface{}) []byte {
|
||||
return func(pass []byte, hash interface{}) []byte {
|
||||
s := []byte(salt)
|
||||
p := append(pass, s...)
|
||||
h := sha512.Sum512(p)
|
||||
return h[:]
|
||||
}
|
||||
}
|
||||
|
||||
// EOF
|
|
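As described in the comment at the top of this file, one of these encoders can be swapped in for the built-in sqlite_crypt through RegisterFunc in a ConnectHook. A sketch, assuming RegisterFunc accepts this []byte/interface{} signature; driver name and salt are made up:

	sql.Register("sqlite3_with_sha256_crypt",
		&sqlite3.SQLiteDriver{
			ConnectHook: func(conn *sqlite3.SQLiteConn) error {
				return conn.RegisterFunc("sqlite_crypt",
					sqlite3.CryptEncoderSSHA256("example-salt"), true)
			},
		})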
@ -0,0 +1,70 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build cgo
|
||||
// +build go1.8
|
||||
|
||||
package sqlite3
|
||||
|
||||
import (
|
||||
"database/sql/driver"
|
||||
|
||||
"context"
|
||||
)
|
||||
|
||||
// Ping implements Pinger.
|
||||
func (c *SQLiteConn) Ping(ctx context.Context) error {
|
||||
if c.db == nil {
|
||||
// must be ErrBadConn for sql to close the database
|
||||
return driver.ErrBadConn
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// QueryContext implements QueryerContext.
|
||||
func (c *SQLiteConn) QueryContext(ctx context.Context, query string, args []driver.NamedValue) (driver.Rows, error) {
|
||||
list := make([]namedValue, len(args))
|
||||
for i, nv := range args {
|
||||
list[i] = namedValue(nv)
|
||||
}
|
||||
return c.query(ctx, query, list)
|
||||
}
|
||||
|
||||
// ExecContext implements ExecerContext.
|
||||
func (c *SQLiteConn) ExecContext(ctx context.Context, query string, args []driver.NamedValue) (driver.Result, error) {
|
||||
list := make([]namedValue, len(args))
|
||||
for i, nv := range args {
|
||||
list[i] = namedValue(nv)
|
||||
}
|
||||
return c.exec(ctx, query, list)
|
||||
}
|
||||
|
||||
// PrepareContext implements ConnPrepareContext.
|
||||
func (c *SQLiteConn) PrepareContext(ctx context.Context, query string) (driver.Stmt, error) {
|
||||
return c.prepare(ctx, query)
|
||||
}
|
||||
|
||||
// BeginTx implements ConnBeginTx.
|
||||
func (c *SQLiteConn) BeginTx(ctx context.Context, opts driver.TxOptions) (driver.Tx, error) {
|
||||
return c.begin(ctx)
|
||||
}
|
||||
|
||||
// QueryContext implements QueryerContext.
|
||||
func (s *SQLiteStmt) QueryContext(ctx context.Context, args []driver.NamedValue) (driver.Rows, error) {
|
||||
list := make([]namedValue, len(args))
|
||||
for i, nv := range args {
|
||||
list[i] = namedValue(nv)
|
||||
}
|
||||
return s.query(ctx, list)
|
||||
}
|
||||
|
||||
// ExecContext implements ExecerContext.
|
||||
func (s *SQLiteStmt) ExecContext(ctx context.Context, args []driver.NamedValue) (driver.Result, error) {
|
||||
list := make([]namedValue, len(args))
|
||||
for i, nv := range args {
|
||||
list[i] = namedValue(nv)
|
||||
}
|
||||
return s.exec(ctx, list)
|
||||
}
|
|
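Because the connection and statement types implement the *Context interfaces above, database/sql cancellation and deadlines work as usual. A short sketch (query and table are illustrative; assumes context, time and log are imported and db is an open *sql.DB):

	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
	defer cancel()

	// Cancellation or the deadline aborts the query through QueryContext above.
	rows, err := db.QueryContext(ctx, "SELECT id, name FROM users WHERE active = ?", true)
	if err != nil {
		log.Fatal(err)
	}
	defer rows.Close()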
@ -0,0 +1,19 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build libsqlite3
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -DUSE_LIBSQLITE3
|
||||
#cgo linux LDFLAGS: -lsqlite3
|
||||
#cgo darwin LDFLAGS: -L/usr/local/opt/sqlite/lib -lsqlite3
|
||||
#cgo darwin CFLAGS: -I/usr/local/opt/sqlite/include
|
||||
#cgo openbsd LDFLAGS: -lsqlite3
|
||||
#cgo solaris LDFLAGS: -lsqlite3
|
||||
#cgo windows LDFLAGS: -lsqlite3
|
||||
*/
|
||||
import "C"
|
|
@ -0,0 +1,84 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build !sqlite_omit_load_extension
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#ifndef USE_LIBSQLITE3
|
||||
#include "sqlite3-binding.h"
|
||||
#else
|
||||
#include <sqlite3.h>
|
||||
#endif
|
||||
#include <stdlib.h>
|
||||
*/
|
||||
import "C"
|
||||
import (
|
||||
"errors"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
func (c *SQLiteConn) loadExtensions(extensions []string) error {
|
||||
rv := C.sqlite3_enable_load_extension(c.db, 1)
|
||||
if rv != C.SQLITE_OK {
|
||||
return errors.New(C.GoString(C.sqlite3_errmsg(c.db)))
|
||||
}
|
||||
|
||||
for _, extension := range extensions {
|
||||
if err := c.loadExtension(extension, nil); err != nil {
|
||||
C.sqlite3_enable_load_extension(c.db, 0)
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
rv = C.sqlite3_enable_load_extension(c.db, 0)
|
||||
if rv != C.SQLITE_OK {
|
||||
return errors.New(C.GoString(C.sqlite3_errmsg(c.db)))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoadExtension loads the sqlite3 extension.
|
||||
func (c *SQLiteConn) LoadExtension(lib string, entry string) error {
|
||||
rv := C.sqlite3_enable_load_extension(c.db, 1)
|
||||
if rv != C.SQLITE_OK {
|
||||
return errors.New(C.GoString(C.sqlite3_errmsg(c.db)))
|
||||
}
|
||||
|
||||
if err := c.loadExtension(lib, &entry); err != nil {
|
||||
C.sqlite3_enable_load_extension(c.db, 0)
|
||||
return err
|
||||
}
|
||||
|
||||
rv = C.sqlite3_enable_load_extension(c.db, 0)
|
||||
if rv != C.SQLITE_OK {
|
||||
return errors.New(C.GoString(C.sqlite3_errmsg(c.db)))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *SQLiteConn) loadExtension(lib string, entry *string) error {
|
||||
clib := C.CString(lib)
|
||||
defer C.free(unsafe.Pointer(clib))
|
||||
|
||||
var centry *C.char
|
||||
if entry != nil {
|
||||
centry = C.CString(*entry)
|
||||
defer C.free(unsafe.Pointer(centry))
|
||||
}
|
||||
|
||||
var errMsg *C.char
|
||||
defer C.sqlite3_free(unsafe.Pointer(errMsg))
|
||||
|
||||
rv := C.sqlite3_load_extension(c.db, clib, centry, &errMsg)
|
||||
if rv != C.SQLITE_OK {
|
||||
return errors.New(C.GoString(errMsg))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
|
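Extensions are normally requested through the Extensions field of SQLiteDriver, which funnels into loadExtensions above; LoadExtension does the same for a single library on an open connection. A sketch with a hypothetical extension library name:

	sql.Register("sqlite3_with_extension",
		&sqlite3.SQLiteDriver{
			Extensions: []string{
				"mod_spatialite", // hypothetical extension library
			},
		})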
@ -0,0 +1,24 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build sqlite_omit_load_extension
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -DSQLITE_OMIT_LOAD_EXTENSION
|
||||
*/
|
||||
import "C"
|
||||
import (
|
||||
"errors"
|
||||
)
|
||||
|
||||
func (c *SQLiteConn) loadExtensions(extensions []string) error {
|
||||
return errors.New("Extensions have been disabled for static builds")
|
||||
}
|
||||
|
||||
func (c *SQLiteConn) LoadExtension(lib string, entry string) error {
|
||||
return errors.New("Extensions have been disabled for static builds")
|
||||
}
|
15
vendor/github.com/mattn/go-sqlite3/sqlite3_opt_allow_uri_authority.go
generated
vendored
Normal file
|
@ -0,0 +1,15 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
// Copyright (C) 2018 G.J.R. Timmer <gjr.timmer@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build sqlite_allow_uri_authority
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -DSQLITE_ALLOW_URI_AUTHORITY
|
||||
#cgo LDFLAGS: -lm
|
||||
*/
|
||||
import "C"
|
|
@ -0,0 +1,16 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
// Copyright (C) 2018 G.J.R. Timmer <gjr.timmer@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build !windows
|
||||
// +build sqlite_app_armor
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -DSQLITE_ENABLE_API_ARMOR
|
||||
#cgo LDFLAGS: -lm
|
||||
*/
|
||||
import "C"
|
|
@ -0,0 +1,21 @@
|
|||
// +build sqlite_column_metadata
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#ifndef USE_LIBSQLITE3
|
||||
#cgo CFLAGS: -DSQLITE_ENABLE_COLUMN_METADATA
|
||||
#include <sqlite3-binding.h>
|
||||
#else
|
||||
#include <sqlite3.h>
|
||||
#endif
|
||||
*/
|
||||
import "C"
|
||||
|
||||
// ColumnTableName returns the table that is the origin of a particular result
|
||||
// column in a SELECT statement.
|
||||
//
|
||||
// See https://www.sqlite.org/c3ref/column_database_name.html
|
||||
func (s *SQLiteStmt) ColumnTableName(n int) string {
|
||||
return C.GoString(C.sqlite3_column_table_name(s.s, C.int(n)))
|
||||
}
|
|
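ColumnTableName is only reachable from the driver-level statement, for example via database/sql's Conn.Raw. A rough sketch, assuming a build with the sqlite_column_metadata tag, an open conn obtained from db.Conn, and that the prepared statement's concrete type is *sqlite3.SQLiteStmt (table name is hypothetical):

	err = conn.Raw(func(driverConn interface{}) error {
		stmt, err := driverConn.(*sqlite3.SQLiteConn).Prepare("SELECT id FROM users")
		if err != nil {
			return err
		}
		defer stmt.Close()
		// Expected to print "users" for result column 0.
		fmt.Println(stmt.(*sqlite3.SQLiteStmt).ColumnTableName(0))
		return nil
	})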
@ -0,0 +1,15 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
// Copyright (C) 2018 G.J.R. Timmer <gjr.timmer@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build sqlite_foreign_keys
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -DSQLITE_DEFAULT_FOREIGN_KEYS=1
|
||||
#cgo LDFLAGS: -lm
|
||||
*/
|
||||
import "C"
|
|
@ -0,0 +1,14 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build sqlite_fts5 fts5
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -DSQLITE_ENABLE_FTS5
|
||||
#cgo LDFLAGS: -lm
|
||||
*/
|
||||
import "C"
|
|
@ -0,0 +1,17 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build sqlite_icu icu
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo LDFLAGS: -licuuc -licui18n
|
||||
#cgo CFLAGS: -DSQLITE_ENABLE_ICU
|
||||
#cgo darwin CFLAGS: -I/usr/local/opt/icu4c/include
|
||||
#cgo darwin LDFLAGS: -L/usr/local/opt/icu4c/lib
|
||||
#cgo openbsd LDFLAGS: -lsqlite3
|
||||
*/
|
||||
import "C"
|
|
@ -0,0 +1,15 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
// Copyright (C) 2018 G.J.R. Timmer <gjr.timmer@gmail.com>.
|
||||
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build sqlite_introspect
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -DSQLITE_INTROSPECTION_PRAGMAS
|
||||
#cgo LDFLAGS: -lm
|
||||
*/
|
||||
import "C"
|
|
@ -0,0 +1,20 @@
|
|||
// Copyright (C) 2019 G.J.R. Timmer <gjr.timmer@gmail.com>.
|
||||
// Copyright (C) 2018 segment.com <friends@segment.com>
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build cgo
|
||||
|
||||
package sqlite3
|
||||
|
||||
// SQLitePreUpdateData represents all of the data available during a
|
||||
// pre-update hook call.
|
||||
type SQLitePreUpdateData struct {
|
||||
Conn *SQLiteConn
|
||||
Op int
|
||||
DatabaseName string
|
||||
TableName string
|
||||
OldRowID int64
|
||||
NewRowID int64
|
||||
}
|
|
@ -0,0 +1,112 @@
|
|||
// Copyright (C) 2019 G.J.R. Timmer <gjr.timmer@gmail.com>.
|
||||
// Copyright (C) 2018 segment.com <friends@segment.com>
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build sqlite_preupdate_hook
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -DSQLITE_ENABLE_PREUPDATE_HOOK
|
||||
#cgo LDFLAGS: -lm
|
||||
|
||||
#ifndef USE_LIBSQLITE3
|
||||
#include "sqlite3-binding.h"
|
||||
#else
|
||||
#include <sqlite3.h>
|
||||
#endif
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
|
||||
void preUpdateHookTrampoline(void*, sqlite3 *, int, char *, char *, sqlite3_int64, sqlite3_int64);
|
||||
*/
|
||||
import "C"
|
||||
import (
|
||||
"errors"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// RegisterPreUpdateHook sets the pre-update hook for a connection.
|
||||
//
|
||||
// The callback is passed a SQLitePreUpdateData struct with the data for
|
||||
// the update, as well as methods for fetching copies of impacted data.
|
||||
//
|
||||
// If there is an existing preupdate hook for this connection, it will be
|
||||
// removed. If callback is nil the existing hook (if any) will be removed
|
||||
// without creating a new one.
|
||||
func (c *SQLiteConn) RegisterPreUpdateHook(callback func(SQLitePreUpdateData)) {
|
||||
if callback == nil {
|
||||
C.sqlite3_preupdate_hook(c.db, nil, nil)
|
||||
} else {
|
||||
C.sqlite3_preupdate_hook(c.db, (*[0]byte)(unsafe.Pointer(C.preUpdateHookTrampoline)), unsafe.Pointer(newHandle(c, callback)))
|
||||
}
|
||||
}
|
||||
|
||||
// Depth returns the source path of the write, see sqlite3_preupdate_depth()
|
||||
func (d *SQLitePreUpdateData) Depth() int {
|
||||
return int(C.sqlite3_preupdate_depth(d.Conn.db))
|
||||
}
|
||||
|
||||
// Count returns the number of columns in the row
|
||||
func (d *SQLitePreUpdateData) Count() int {
|
||||
return int(C.sqlite3_preupdate_count(d.Conn.db))
|
||||
}
|
||||
|
||||
func (d *SQLitePreUpdateData) row(dest []interface{}, new bool) error {
|
||||
for i := 0; i < d.Count() && i < len(dest); i++ {
|
||||
var val *C.sqlite3_value
|
||||
var src interface{}
|
||||
|
||||
// Initially I tried making this just a function pointer argument, but
|
||||
// it's absurdly complicated to pass C function pointers.
|
||||
if new {
|
||||
C.sqlite3_preupdate_new(d.Conn.db, C.int(i), &val)
|
||||
} else {
|
||||
C.sqlite3_preupdate_old(d.Conn.db, C.int(i), &val)
|
||||
}
|
||||
|
||||
switch C.sqlite3_value_type(val) {
|
||||
case C.SQLITE_INTEGER:
|
||||
src = int64(C.sqlite3_value_int64(val))
|
||||
case C.SQLITE_FLOAT:
|
||||
src = float64(C.sqlite3_value_double(val))
|
||||
case C.SQLITE_BLOB:
|
||||
len := C.sqlite3_value_bytes(val)
|
||||
blobptr := C.sqlite3_value_blob(val)
|
||||
src = C.GoBytes(blobptr, len)
|
||||
case C.SQLITE_TEXT:
|
||||
len := C.sqlite3_value_bytes(val)
|
||||
cstrptr := unsafe.Pointer(C.sqlite3_value_text(val))
|
||||
src = C.GoBytes(cstrptr, len)
|
||||
case C.SQLITE_NULL:
|
||||
src = nil
|
||||
}
|
||||
|
||||
err := convertAssign(&dest[i], src)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Old populates dest with the row data to be replaced. This works similar to
|
||||
// database/sql's Rows.Scan()
|
||||
func (d *SQLitePreUpdateData) Old(dest ...interface{}) error {
|
||||
if d.Op == SQLITE_INSERT {
|
||||
return errors.New("There is no old row for INSERT operations")
|
||||
}
|
||||
return d.row(dest, false)
|
||||
}
|
||||
|
||||
// New populates dest with the replacement row data. This works similar to
|
||||
// database/sql's Rows.Scan()
|
||||
func (d *SQLitePreUpdateData) New(dest ...interface{}) error {
|
||||
if d.Op == SQLITE_DELETE {
|
||||
return errors.New("There is no new row for DELETE operations")
|
||||
}
|
||||
return d.row(dest, true)
|
||||
}
|
|
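A hedged sketch of wiring the hook up through a ConnectHook (requires building with the sqlite_preupdate_hook tag; driver name and log output are illustrative, log is assumed imported):

	sql.Register("sqlite3_with_preupdate",
		&sqlite3.SQLiteDriver{
			ConnectHook: func(conn *sqlite3.SQLiteConn) error {
				conn.RegisterPreUpdateHook(func(d sqlite3.SQLitePreUpdateData) {
					log.Printf("op=%d on %s.%s rows %d -> %d",
						d.Op, d.DatabaseName, d.TableName, d.OldRowID, d.NewRowID)
				})
				return nil
			},
		})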
@ -0,0 +1,21 @@
|
|||
// Copyright (C) 2019 G.J.R. Timmer <gjr.timmer@gmail.com>.
|
||||
// Copyright (C) 2018 segment.com <friends@segment.com>
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build !sqlite_preupdate_hook,cgo
|
||||
|
||||
package sqlite3
|
||||
|
||||
// RegisterPreUpdateHook sets the pre-update hook for a connection.
|
||||
//
|
||||
// The callback is passed a SQLitePreUpdateData struct with the data for
|
||||
// the update, as well as methods for fetching copies of impacted data.
|
||||
//
|
||||
// If there is an existing preupdate hook for this connection, it will be
|
||||
// removed. If callback is nil the existing hook (if any) will be removed
|
||||
// without creating a new one.
|
||||
func (c *SQLiteConn) RegisterPreUpdateHook(callback func(SQLitePreUpdateData)) {
|
||||
// NOOP
|
||||
}
|
|
@ -0,0 +1,15 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
// Copyright (C) 2018 G.J.R. Timmer <gjr.timmer@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build sqlite_secure_delete
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -DSQLITE_SECURE_DELETE=1
|
||||
#cgo LDFLAGS: -lm
|
||||
*/
|
||||
import "C"
|
15
vendor/github.com/mattn/go-sqlite3/sqlite3_opt_secure_delete_fast.go
generated
vendored
Normal file
|
@ -0,0 +1,15 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
// Copyright (C) 2018 G.J.R. Timmer <gjr.timmer@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build sqlite_secure_delete_fast
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -DSQLITE_SECURE_DELETE=FAST
|
||||
#cgo LDFLAGS: -lm
|
||||
*/
|
||||
import "C"
|
|
@ -0,0 +1,15 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
// Copyright (C) 2018 G.J.R. Timmer <gjr.timmer@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build sqlite_stat4
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -DSQLITE_ENABLE_STAT4
|
||||
#cgo LDFLAGS: -lm
|
||||
*/
|
||||
import "C"
|
|
@ -0,0 +1,85 @@
|
|||
// Copyright (C) 2018 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
#ifdef SQLITE_ENABLE_UNLOCK_NOTIFY
|
||||
#include <stdio.h>
|
||||
#include "sqlite3-binding.h"
|
||||
|
||||
extern int unlock_notify_wait(sqlite3 *db);
|
||||
|
||||
int
|
||||
_sqlite3_step_blocking(sqlite3_stmt *stmt)
|
||||
{
|
||||
int rv;
|
||||
sqlite3* db;
|
||||
|
||||
db = sqlite3_db_handle(stmt);
|
||||
for (;;) {
|
||||
rv = sqlite3_step(stmt);
|
||||
if (rv != SQLITE_LOCKED) {
|
||||
break;
|
||||
}
|
||||
if (sqlite3_extended_errcode(db) != SQLITE_LOCKED_SHAREDCACHE) {
|
||||
break;
|
||||
}
|
||||
rv = unlock_notify_wait(db);
|
||||
if (rv != SQLITE_OK) {
|
||||
break;
|
||||
}
|
||||
sqlite3_reset(stmt);
|
||||
}
|
||||
|
||||
return rv;
|
||||
}
|
||||
|
||||
int
|
||||
_sqlite3_step_row_blocking(sqlite3_stmt* stmt, long long* rowid, long long* changes)
|
||||
{
|
||||
int rv;
|
||||
sqlite3* db;
|
||||
|
||||
db = sqlite3_db_handle(stmt);
|
||||
for (;;) {
|
||||
rv = sqlite3_step(stmt);
|
||||
if (rv!=SQLITE_LOCKED) {
|
||||
break;
|
||||
}
|
||||
if (sqlite3_extended_errcode(db) != SQLITE_LOCKED_SHAREDCACHE) {
|
||||
break;
|
||||
}
|
||||
rv = unlock_notify_wait(db);
|
||||
if (rv != SQLITE_OK) {
|
||||
break;
|
||||
}
|
||||
sqlite3_reset(stmt);
|
||||
}
|
||||
|
||||
*rowid = (long long) sqlite3_last_insert_rowid(db);
|
||||
*changes = (long long) sqlite3_changes(db);
|
||||
return rv;
|
||||
}
|
||||
|
||||
int
|
||||
_sqlite3_prepare_v2_blocking(sqlite3 *db, const char *zSql, int nBytes, sqlite3_stmt **ppStmt, const char **pzTail)
|
||||
{
|
||||
int rv;
|
||||
|
||||
for (;;) {
|
||||
rv = sqlite3_prepare_v2(db, zSql, nBytes, ppStmt, pzTail);
|
||||
if (rv!=SQLITE_LOCKED) {
|
||||
break;
|
||||
}
|
||||
if (sqlite3_extended_errcode(db) != SQLITE_LOCKED_SHAREDCACHE) {
|
||||
break;
|
||||
}
|
||||
rv = unlock_notify_wait(db);
|
||||
if (rv != SQLITE_OK) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return rv;
|
||||
}
|
||||
#endif
|
|
@ -0,0 +1,93 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build cgo
|
||||
// +build sqlite_unlock_notify
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -DSQLITE_ENABLE_UNLOCK_NOTIFY
|
||||
|
||||
#include <stdlib.h>
|
||||
#include "sqlite3-binding.h"
|
||||
|
||||
extern void unlock_notify_callback(void *arg, int argc);
|
||||
*/
|
||||
import "C"
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"sync"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
type unlock_notify_table struct {
|
||||
sync.Mutex
|
||||
seqnum uint
|
||||
table map[uint]chan struct{}
|
||||
}
|
||||
|
||||
var unt unlock_notify_table = unlock_notify_table{table: make(map[uint]chan struct{})}
|
||||
|
||||
func (t *unlock_notify_table) add(c chan struct{}) uint {
|
||||
t.Lock()
|
||||
defer t.Unlock()
|
||||
h := t.seqnum
|
||||
t.table[h] = c
|
||||
t.seqnum++
|
||||
return h
|
||||
}
|
||||
|
||||
func (t *unlock_notify_table) remove(h uint) {
|
||||
t.Lock()
|
||||
defer t.Unlock()
|
||||
delete(t.table, h)
|
||||
}
|
||||
|
||||
func (t *unlock_notify_table) get(h uint) chan struct{} {
|
||||
t.Lock()
|
||||
defer t.Unlock()
|
||||
c, ok := t.table[h]
|
||||
if !ok {
|
||||
panic(fmt.Sprintf("Non-existent key for unlcok-notify channel: %d", h))
|
||||
}
|
||||
return c
|
||||
}
|
||||
|
||||
//export unlock_notify_callback
|
||||
func unlock_notify_callback(argv unsafe.Pointer, argc C.int) {
|
||||
for i := 0; i < int(argc); i++ {
|
||||
parg := ((*(*[(math.MaxInt32 - 1) / unsafe.Sizeof((*C.uint)(nil))]*[1]uint)(argv))[i])
|
||||
arg := *parg
|
||||
h := arg[0]
|
||||
c := unt.get(h)
|
||||
c <- struct{}{}
|
||||
}
|
||||
}
|
||||
|
||||
//export unlock_notify_wait
|
||||
func unlock_notify_wait(db *C.sqlite3) C.int {
|
||||
// It has to be a buffered channel to not block in sqlite_unlock_notify
|
||||
// as sqlite_unlock_notify could invoke the callback before it returns.
|
||||
c := make(chan struct{}, 1)
|
||||
defer close(c)
|
||||
|
||||
h := unt.add(c)
|
||||
defer unt.remove(h)
|
||||
|
||||
pargv := C.malloc(C.sizeof_uint)
|
||||
defer C.free(pargv)
|
||||
|
||||
argv := (*[1]uint)(pargv)
|
||||
argv[0] = h
|
||||
if rv := C.sqlite3_unlock_notify(db, (*[0]byte)(C.unlock_notify_callback), unsafe.Pointer(pargv)); rv != C.SQLITE_OK {
|
||||
return rv
|
||||
}
|
||||
|
||||
<-c
|
||||
|
||||
return C.SQLITE_OK
|
||||
}
|
|
@ -0,0 +1,289 @@
|
|||
// Copyright (C) 2018 G.J.R. Timmer <gjr.timmer@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build sqlite_userauth
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -DSQLITE_USER_AUTHENTICATION
|
||||
#cgo LDFLAGS: -lm
|
||||
#ifndef USE_LIBSQLITE3
|
||||
#include "sqlite3-binding.h"
|
||||
#else
|
||||
#include <sqlite3.h>
|
||||
#endif
|
||||
#include <stdlib.h>
|
||||
|
||||
static int
|
||||
_sqlite3_user_authenticate(sqlite3* db, const char* zUsername, const char* aPW, int nPW)
|
||||
{
|
||||
return sqlite3_user_authenticate(db, zUsername, aPW, nPW);
|
||||
}
|
||||
|
||||
static int
|
||||
_sqlite3_user_add(sqlite3* db, const char* zUsername, const char* aPW, int nPW, int isAdmin)
|
||||
{
|
||||
return sqlite3_user_add(db, zUsername, aPW, nPW, isAdmin);
|
||||
}
|
||||
|
||||
static int
|
||||
_sqlite3_user_change(sqlite3* db, const char* zUsername, const char* aPW, int nPW, int isAdmin)
|
||||
{
|
||||
return sqlite3_user_change(db, zUsername, aPW, nPW, isAdmin);
|
||||
}
|
||||
|
||||
static int
|
||||
_sqlite3_user_delete(sqlite3* db, const char* zUsername)
|
||||
{
|
||||
return sqlite3_user_delete(db, zUsername);
|
||||
}
|
||||
|
||||
static int
|
||||
_sqlite3_auth_enabled(sqlite3* db)
|
||||
{
|
||||
int exists = -1;
|
||||
|
||||
sqlite3_stmt *stmt;
|
||||
sqlite3_prepare_v2(db, "select count(type) from sqlite_master WHERE type='table' and name='sqlite_user';", -1, &stmt, NULL);
|
||||
|
||||
while ( sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
exists = sqlite3_column_int(stmt, 0);
|
||||
}
|
||||
|
||||
sqlite3_finalize(stmt);
|
||||
|
||||
return exists;
|
||||
}
|
||||
*/
|
||||
import "C"
|
||||
import (
|
||||
"errors"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
const (
|
||||
SQLITE_AUTH = C.SQLITE_AUTH
|
||||
)
|
||||
|
||||
var (
|
||||
ErrUnauthorized = errors.New("SQLITE_AUTH: Unauthorized")
|
||||
ErrAdminRequired = errors.New("SQLITE_AUTH: Unauthorized; Admin Privileges Required")
|
||||
)
|
||||
|
||||
// Authenticate will perform an authentication of the provided username
|
||||
// and password against the database.
|
||||
//
|
||||
// If a database contains the SQLITE_USER table, then the
|
||||
// call to Authenticate must be invoked with an
|
||||
// appropriate username and password prior to enabling read and write
// access to the database.
|
||||
//
|
||||
// Return SQLITE_OK on success or SQLITE_ERROR if the username/password
|
||||
// combination is incorrect or unknown.
|
||||
//
|
||||
// If the SQLITE_USER table is not present in the database file, then
|
||||
// this interface is a harmless no-op returning SQLITE_OK.
|
||||
func (c *SQLiteConn) Authenticate(username, password string) error {
|
||||
rv := c.authenticate(username, password)
|
||||
switch rv {
|
||||
case C.SQLITE_ERROR, C.SQLITE_AUTH:
|
||||
return ErrUnauthorized
|
||||
case C.SQLITE_OK:
|
||||
return nil
|
||||
default:
|
||||
return c.lastError()
|
||||
}
|
||||
}
|
||||
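// A hedged usage sketch (requires the sqlite_userauth build tag; driver name
// and credentials below are placeholders): authenticate as soon as the
// connection is established via a ConnectHook.
//
//	sql.Register("sqlite3_with_userauth",
//		&sqlite3.SQLiteDriver{
//			ConnectHook: func(conn *sqlite3.SQLiteConn) error {
//				return conn.Authenticate("admin", "secret")
//			},
//		})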
|
||||
// authenticate provides the actual authentication to SQLite.
|
||||
// This is not exported for usage in Go.
|
||||
// It is however exported for usage within SQL by the user.
|
||||
//
|
||||
// Returns:
|
||||
// C.SQLITE_OK (0)
|
||||
// C.SQLITE_ERROR (1)
|
||||
// C.SQLITE_AUTH (23)
|
||||
func (c *SQLiteConn) authenticate(username, password string) int {
|
||||
// Allocate C Variables
|
||||
cuser := C.CString(username)
|
||||
cpass := C.CString(password)
|
||||
|
||||
// Free C Variables
|
||||
defer func() {
|
||||
C.free(unsafe.Pointer(cuser))
|
||||
C.free(unsafe.Pointer(cpass))
|
||||
}()
|
||||
|
||||
return int(C._sqlite3_user_authenticate(c.db, cuser, cpass, C.int(len(password))))
|
||||
}
|
||||
|
||||
// AuthUserAdd can be used (by an admin user only)
|
||||
// to create a new user. When called on a no-authentication-required
|
||||
// database, this routine converts the database into an authentication-
|
||||
// required database, automatically makes the added user an
|
||||
// administrator, and logs in the current connection as that user.
|
||||
// The AuthUserAdd only works for the "main" database, not
|
||||
// for any ATTACH-ed databases. Any call to AuthUserAdd by a
|
||||
// non-admin user results in an error.
|
||||
func (c *SQLiteConn) AuthUserAdd(username, password string, admin bool) error {
|
||||
isAdmin := 0
|
||||
if admin {
|
||||
isAdmin = 1
|
||||
}
|
||||
|
||||
rv := c.authUserAdd(username, password, isAdmin)
|
||||
switch rv {
|
||||
case C.SQLITE_ERROR, C.SQLITE_AUTH:
|
||||
return ErrAdminRequired
|
||||
case C.SQLITE_OK:
|
||||
return nil
|
||||
default:
|
||||
return c.lastError()
|
||||
}
|
||||
}
|
||||
|
||||
// authUserAdd enables the User Authentication if not enabled.
|
||||
// Otherwise it will add a user.
|
||||
//
|
||||
// When user authentication is already enabled then this function
|
||||
// can only be called by an admin.
|
||||
//
|
||||
// This is not exported for usage in Go.
|
||||
// It is however exported for usage within SQL by the user.
|
||||
//
|
||||
// Returns:
|
||||
// C.SQLITE_OK (0)
|
||||
// C.SQLITE_ERROR (1)
|
||||
// C.SQLITE_AUTH (23)
|
||||
func (c *SQLiteConn) authUserAdd(username, password string, admin int) int {
|
||||
// Allocate C Variables
|
||||
cuser := C.CString(username)
|
||||
cpass := C.CString(password)
|
||||
|
||||
// Free C Variables
|
||||
defer func() {
|
||||
C.free(unsafe.Pointer(cuser))
|
||||
C.free(unsafe.Pointer(cpass))
|
||||
}()
|
||||
|
||||
return int(C._sqlite3_user_add(c.db, cuser, cpass, C.int(len(password)), C.int(admin)))
|
||||
}
|
||||
|
||||
// AuthUserChange can be used to change a user's
|
||||
// login credentials or admin privilege. Any user can change their own
|
||||
// login credentials. Only an admin user can change another user's login
|
||||
// credentials or admin privilege setting. No user may change their own
|
||||
// admin privilege setting.
|
||||
func (c *SQLiteConn) AuthUserChange(username, password string, admin bool) error {
|
||||
isAdmin := 0
|
||||
if admin {
|
||||
isAdmin = 1
|
||||
}
|
||||
|
||||
rv := c.authUserChange(username, password, isAdmin)
|
||||
switch rv {
|
||||
case C.SQLITE_ERROR, C.SQLITE_AUTH:
|
||||
return ErrAdminRequired
|
||||
case C.SQLITE_OK:
|
||||
return nil
|
||||
default:
|
||||
return c.lastError()
|
||||
}
|
||||
}
|
||||
|
||||
// authUserChange allows modifying a user.
|
||||
// Users can change their own password.
|
||||
//
|
||||
// Only admins can change passwords for other users
|
||||
// and modify the admin flag.
|
||||
//
|
||||
// The admin flag of the currently logged-in user cannot be changed.
// This ensures that there is always an admin.
|
||||
//
|
||||
// This is not exported for usage in Go.
|
||||
// It is however exported for usage within SQL by the user.
|
||||
//
|
||||
// Returns:
|
||||
// C.SQLITE_OK (0)
|
||||
// C.SQLITE_ERROR (1)
|
||||
// C.SQLITE_AUTH (23)
|
||||
func (c *SQLiteConn) authUserChange(username, password string, admin int) int {
|
||||
// Allocate C Variables
|
||||
cuser := C.CString(username)
|
||||
cpass := C.CString(password)
|
||||
|
||||
// Free C Variables
|
||||
defer func() {
|
||||
C.free(unsafe.Pointer(cuser))
|
||||
C.free(unsafe.Pointer(cpass))
|
||||
}()
|
||||
|
||||
return int(C._sqlite3_user_change(c.db, cuser, cpass, C.int(len(password)), C.int(admin)))
|
||||
}
|
||||
|
||||
// AuthUserDelete can be used (by an admin user only)
|
||||
// to delete a user. The currently logged-in user cannot be deleted,
|
||||
// which guarantees that there is always an admin user and hence that
|
||||
// the database cannot be converted into a no-authentication-required
|
||||
// database.
|
||||
func (c *SQLiteConn) AuthUserDelete(username string) error {
|
||||
rv := c.authUserDelete(username)
|
||||
switch rv {
|
||||
case C.SQLITE_ERROR, C.SQLITE_AUTH:
|
||||
return ErrAdminRequired
|
||||
case C.SQLITE_OK:
|
||||
return nil
|
||||
default:
|
||||
return c.lastError()
|
||||
}
|
||||
}
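// Usage sketch (illustrative fragment, not part of the driver): removing a
// user from an admin connection. Assumes a *sqlite3.SQLiteConn as above; the
// user name is a placeholder. Deleting the currently logged-in user fails,
// which is what keeps at least one admin around.
package example

import sqlite3 "github.com/mattn/go-sqlite3"

func removeUser(conn *sqlite3.SQLiteConn) error {
	return conn.AuthUserDelete("reporting")
}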
|
||||
|
||||
// authUserDelete can be used to delete a user.
|
||||
//
|
||||
// This function can only be executed by an admin.
|
||||
//
|
||||
// This is not exported for usage in Go.
|
||||
// It is however exported for usage within SQL by the user.
|
||||
//
|
||||
// Returns:
|
||||
// C.SQLITE_OK (0)
|
||||
// C.SQLITE_ERROR (1)
|
||||
// C.SQLITE_AUTH (23)
|
||||
func (c *SQLiteConn) authUserDelete(username string) int {
|
||||
// Allocate C Variables
|
||||
cuser := C.CString(username)
|
||||
|
||||
// Free C Variables
|
||||
defer func() {
|
||||
C.free(unsafe.Pointer(cuser))
|
||||
}()
|
||||
|
||||
return int(C._sqlite3_user_delete(c.db, cuser))
|
||||
}
|
||||
|
||||
// AuthEnabled checks if the database is protected by user authentication
|
||||
func (c *SQLiteConn) AuthEnabled() (exists bool) {
|
||||
rv := c.authEnabled()
|
||||
if rv == 1 {
|
||||
exists = true
|
||||
}
|
||||
|
||||
return
|
||||
}
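// Usage sketch (illustrative fragment, not part of the driver): logging in on
// a protected database before using it. Assumes a *sqlite3.SQLiteConn as
// above, placeholder credentials, and that Authenticate (defined elsewhere in
// this package) performs the actual login.
package example

import sqlite3 "github.com/mattn/go-sqlite3"

func login(conn *sqlite3.SQLiteConn) error {
	if !conn.AuthEnabled() {
		// No SQLITE_USER table yet, nothing to authenticate against.
		return nil
	}
	return conn.Authenticate("admin", "s3cret")
}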
|
||||
|
||||
// authEnabled performs the actual check for user authentication.
|
||||
//
|
||||
// This is not exported for usage in Go.
|
||||
// It is however exported for usage within SQL by the user.
|
||||
//
|
||||
// Returns:
|
||||
// 0 - Disabled
|
||||
// 1 - Enabled
|
||||
func (c *SQLiteConn) authEnabled() int {
|
||||
return int(C._sqlite3_auth_enabled(c.db))
|
||||
}
|
||||
|
||||
// EOF
|
|
@ -0,0 +1,152 @@
|
|||
// Copyright (C) 2018 G.J.R. Timmer <gjr.timmer@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build !sqlite_userauth
|
||||
|
||||
package sqlite3
|
||||
|
||||
import (
|
||||
"C"
|
||||
)
|
||||
|
||||
// Authenticate will perform an authentication of the provided username
|
||||
// and password against the database.
|
||||
//
|
||||
// If a database contains the SQLITE_USER table, then
// Authenticate must be invoked with an appropriate username and
// password before read and write access to the database is enabled.
//
// Returns SQLITE_OK on success or SQLITE_ERROR if the username/password
// combination is incorrect or unknown.
//
// If the SQLITE_USER table is not present in the database file, then
// this interface is a harmless no-op returning SQLITE_OK.
|
||||
func (c *SQLiteConn) Authenticate(username, password string) error {
|
||||
// NOOP
|
||||
return nil
|
||||
}
|
||||
|
||||
// authenticate provides the actual authentication to SQLite.
|
||||
// This is not exported for usage in Go.
|
||||
// It is however exported for usage within SQL by the user.
|
||||
//
|
||||
// Returns:
|
||||
// C.SQLITE_OK (0)
|
||||
// C.SQLITE_ERROR (1)
|
||||
// C.SQLITE_AUTH (23)
|
||||
func (c *SQLiteConn) authenticate(username, password string) int {
|
||||
// NOOP
|
||||
return 0
|
||||
}
|
||||
|
||||
// AuthUserAdd can be used (by an admin user only)
|
||||
// to create a new user. When called on a no-authentication-required
|
||||
// database, this routine converts the database into an authentication-
|
||||
// required database, automatically makes the added user an
|
||||
// administrator, and logs in the current connection as that user.
|
||||
// AuthUserAdd only works for the "main" database, not
|
||||
// for any ATTACH-ed databases. Any call to AuthUserAdd by a
|
||||
// non-admin user results in an error.
|
||||
func (c *SQLiteConn) AuthUserAdd(username, password string, admin bool) error {
|
||||
// NOOP
|
||||
return nil
|
||||
}
|
||||
|
||||
// authUserAdd enables user authentication if it is not enabled yet;
// otherwise it adds a user.
|
||||
//
|
||||
// When user authentication is already enabled then this function
|
||||
// can only be called by an admin.
|
||||
//
|
||||
// This is not exported for usage in Go.
|
||||
// It is however exported for usage within SQL by the user.
|
||||
//
|
||||
// Returns:
|
||||
// C.SQLITE_OK (0)
|
||||
// C.SQLITE_ERROR (1)
|
||||
// C.SQLITE_AUTH (23)
|
||||
func (c *SQLiteConn) authUserAdd(username, password string, admin int) int {
|
||||
// NOOP
|
||||
return 0
|
||||
}
|
||||
|
||||
// AuthUserChange can be used to change a user's
// login credentials or admin privilege. Any user can change their own
// login credentials. Only an admin user can change another user's login
// credentials or admin privilege setting. No user may change their own
// admin privilege setting.
|
||||
func (c *SQLiteConn) AuthUserChange(username, password string, admin bool) error {
|
||||
// NOOP
|
||||
return nil
|
||||
}
|
||||
|
||||
// authUserChange allows modifying a user.
// Users can change their own password.
//
// Only admins can change passwords for other users
// and modify the admin flag.
//
// The admin flag of the currently logged-in user cannot be changed.
// This ensures that there is always an admin.
|
||||
//
|
||||
// This is not exported for usage in Go.
|
||||
// It is however exported for usage within SQL by the user.
|
||||
//
|
||||
// Returns:
|
||||
// C.SQLITE_OK (0)
|
||||
// C.SQLITE_ERROR (1)
|
||||
// C.SQLITE_AUTH (23)
|
||||
func (c *SQLiteConn) authUserChange(username, password string, admin int) int {
|
||||
// NOOP
|
||||
return 0
|
||||
}
|
||||
|
||||
// AuthUserDelete can be used (by an admin user only)
|
||||
// to delete a user. The currently logged-in user cannot be deleted,
|
||||
// which guarantees that there is always an admin user and hence that
|
||||
// the database cannot be converted into a no-authentication-required
|
||||
// database.
|
||||
func (c *SQLiteConn) AuthUserDelete(username string) error {
|
||||
// NOOP
|
||||
return nil
|
||||
}
|
||||
|
||||
// authUserDelete can be used to delete a user.
|
||||
//
|
||||
// This function can only be executed by an admin.
|
||||
//
|
||||
// This is not exported for usage in Go.
|
||||
// It is however exported for usage within SQL by the user.
|
||||
//
|
||||
// Returns:
|
||||
// C.SQLITE_OK (0)
|
||||
// C.SQLITE_ERROR (1)
|
||||
// C.SQLITE_AUTH (23)
|
||||
func (c *SQLiteConn) authUserDelete(username string) int {
|
||||
// NOOP
|
||||
return 0
|
||||
}
|
||||
|
||||
// AuthEnabled checks if the database is protected by user authentication
|
||||
func (c *SQLiteConn) AuthEnabled() (exists bool) {
|
||||
// NOOP
|
||||
return false
|
||||
}
|
||||
|
||||
// authEnabled performs the actual check for user authentication.
|
||||
//
|
||||
// This is not exported for usage in Go.
|
||||
// It is however exported for usage within SQL by the user.
|
||||
//
|
||||
// Returns:
|
||||
// 0 - Disabled
|
||||
// 1 - Enabled
|
||||
func (c *SQLiteConn) authEnabled() int {
|
||||
// NOOP
|
||||
return 0
|
||||
}
|
||||
|
||||
// EOF
|
|
@ -0,0 +1,15 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
// Copyright (C) 2018 G.J.R. Timmer <gjr.timmer@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build sqlite_vacuum_full
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -DSQLITE_DEFAULT_AUTOVACUUM=1
|
||||
#cgo LDFLAGS: -lm
|
||||
*/
|
||||
import "C"
|
|
@ -0,0 +1,15 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
// Copyright (C) 2018 G.J.R. Timmer <gjr.timmer@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build sqlite_vacuum_incr
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -DSQLITE_DEFAULT_AUTOVACUUM=2
|
||||
#cgo LDFLAGS: -lm
|
||||
*/
|
||||
import "C"
|
|
@ -0,0 +1,720 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build sqlite_vtable vtable
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -std=gnu99
|
||||
#cgo CFLAGS: -DSQLITE_ENABLE_RTREE
|
||||
#cgo CFLAGS: -DSQLITE_THREADSAFE
|
||||
#cgo CFLAGS: -DSQLITE_ENABLE_FTS3
|
||||
#cgo CFLAGS: -DSQLITE_ENABLE_FTS3_PARENTHESIS
|
||||
#cgo CFLAGS: -DSQLITE_ENABLE_FTS4_UNICODE61
|
||||
#cgo CFLAGS: -DSQLITE_TRACE_SIZE_LIMIT=15
|
||||
#cgo CFLAGS: -DSQLITE_ENABLE_COLUMN_METADATA=1
|
||||
#cgo CFLAGS: -Wno-deprecated-declarations
|
||||
|
||||
#ifndef USE_LIBSQLITE3
|
||||
#include "sqlite3-binding.h"
|
||||
#else
|
||||
#include <sqlite3.h>
|
||||
#endif
|
||||
#include <stdlib.h>
|
||||
#include <stdint.h>
|
||||
#include <memory.h>
|
||||
|
||||
static inline char *_sqlite3_mprintf(char *zFormat, char *arg) {
|
||||
return sqlite3_mprintf(zFormat, arg);
|
||||
}
|
||||
|
||||
typedef struct goVTab goVTab;
|
||||
|
||||
struct goVTab {
|
||||
sqlite3_vtab base;
|
||||
void *vTab;
|
||||
};
|
||||
|
||||
uintptr_t goMInit(void *db, void *pAux, int argc, char **argv, char **pzErr, int isCreate);
|
||||
|
||||
static int cXInit(sqlite3 *db, void *pAux, int argc, const char *const*argv, sqlite3_vtab **ppVTab, char **pzErr, int isCreate) {
|
||||
void *vTab = (void *)goMInit(db, pAux, argc, (char**)argv, pzErr, isCreate);
|
||||
if (!vTab || *pzErr) {
|
||||
return SQLITE_ERROR;
|
||||
}
|
||||
goVTab *pvTab = (goVTab *)sqlite3_malloc(sizeof(goVTab));
|
||||
if (!pvTab) {
|
||||
*pzErr = sqlite3_mprintf("%s", "Out of memory");
|
||||
return SQLITE_NOMEM;
|
||||
}
|
||||
memset(pvTab, 0, sizeof(goVTab));
|
||||
pvTab->vTab = vTab;
|
||||
|
||||
*ppVTab = (sqlite3_vtab *)pvTab;
|
||||
*pzErr = 0;
|
||||
return SQLITE_OK;
|
||||
}
|
||||
|
||||
static inline int cXCreate(sqlite3 *db, void *pAux, int argc, const char *const*argv, sqlite3_vtab **ppVTab, char **pzErr) {
|
||||
return cXInit(db, pAux, argc, argv, ppVTab, pzErr, 1);
|
||||
}
|
||||
static inline int cXConnect(sqlite3 *db, void *pAux, int argc, const char *const*argv, sqlite3_vtab **ppVTab, char **pzErr) {
|
||||
return cXInit(db, pAux, argc, argv, ppVTab, pzErr, 0);
|
||||
}
|
||||
|
||||
char* goVBestIndex(void *pVTab, void *icp);
|
||||
|
||||
static inline int cXBestIndex(sqlite3_vtab *pVTab, sqlite3_index_info *info) {
|
||||
char *pzErr = goVBestIndex(((goVTab*)pVTab)->vTab, info);
|
||||
if (pzErr) {
|
||||
if (pVTab->zErrMsg)
|
||||
sqlite3_free(pVTab->zErrMsg);
|
||||
pVTab->zErrMsg = pzErr;
|
||||
return SQLITE_ERROR;
|
||||
}
|
||||
return SQLITE_OK;
|
||||
}
|
||||
|
||||
char* goVRelease(void *pVTab, int isDestroy);
|
||||
|
||||
static int cXRelease(sqlite3_vtab *pVTab, int isDestroy) {
|
||||
char *pzErr = goVRelease(((goVTab*)pVTab)->vTab, isDestroy);
|
||||
if (pzErr) {
|
||||
if (pVTab->zErrMsg)
|
||||
sqlite3_free(pVTab->zErrMsg);
|
||||
pVTab->zErrMsg = pzErr;
|
||||
return SQLITE_ERROR;
|
||||
}
|
||||
if (pVTab->zErrMsg)
|
||||
sqlite3_free(pVTab->zErrMsg);
|
||||
sqlite3_free(pVTab);
|
||||
return SQLITE_OK;
|
||||
}
|
||||
|
||||
static inline int cXDisconnect(sqlite3_vtab *pVTab) {
|
||||
return cXRelease(pVTab, 0);
|
||||
}
|
||||
static inline int cXDestroy(sqlite3_vtab *pVTab) {
|
||||
return cXRelease(pVTab, 1);
|
||||
}
|
||||
|
||||
typedef struct goVTabCursor goVTabCursor;
|
||||
|
||||
struct goVTabCursor {
|
||||
sqlite3_vtab_cursor base;
|
||||
void *vTabCursor;
|
||||
};
|
||||
|
||||
uintptr_t goVOpen(void *pVTab, char **pzErr);
|
||||
|
||||
static int cXOpen(sqlite3_vtab *pVTab, sqlite3_vtab_cursor **ppCursor) {
|
||||
void *vTabCursor = (void *)goVOpen(((goVTab*)pVTab)->vTab, &(pVTab->zErrMsg));
|
||||
goVTabCursor *pCursor = (goVTabCursor *)sqlite3_malloc(sizeof(goVTabCursor));
|
||||
if (!pCursor) {
|
||||
return SQLITE_NOMEM;
|
||||
}
|
||||
memset(pCursor, 0, sizeof(goVTabCursor));
|
||||
pCursor->vTabCursor = vTabCursor;
|
||||
*ppCursor = (sqlite3_vtab_cursor *)pCursor;
|
||||
return SQLITE_OK;
|
||||
}
|
||||
|
||||
static int setErrMsg(sqlite3_vtab_cursor *pCursor, char *pzErr) {
|
||||
if (pCursor->pVtab->zErrMsg)
|
||||
sqlite3_free(pCursor->pVtab->zErrMsg);
|
||||
pCursor->pVtab->zErrMsg = pzErr;
|
||||
return SQLITE_ERROR;
|
||||
}
|
||||
|
||||
char* goVClose(void *pCursor);
|
||||
|
||||
static int cXClose(sqlite3_vtab_cursor *pCursor) {
|
||||
char *pzErr = goVClose(((goVTabCursor*)pCursor)->vTabCursor);
|
||||
if (pzErr) {
|
||||
return setErrMsg(pCursor, pzErr);
|
||||
}
|
||||
sqlite3_free(pCursor);
|
||||
return SQLITE_OK;
|
||||
}
|
||||
|
||||
char* goVFilter(void *pCursor, int idxNum, char* idxName, int argc, sqlite3_value **argv);
|
||||
|
||||
static int cXFilter(sqlite3_vtab_cursor *pCursor, int idxNum, const char *idxStr, int argc, sqlite3_value **argv) {
|
||||
char *pzErr = goVFilter(((goVTabCursor*)pCursor)->vTabCursor, idxNum, (char*)idxStr, argc, argv);
|
||||
if (pzErr) {
|
||||
return setErrMsg(pCursor, pzErr);
|
||||
}
|
||||
return SQLITE_OK;
|
||||
}
|
||||
|
||||
char* goVNext(void *pCursor);
|
||||
|
||||
static int cXNext(sqlite3_vtab_cursor *pCursor) {
|
||||
char *pzErr = goVNext(((goVTabCursor*)pCursor)->vTabCursor);
|
||||
if (pzErr) {
|
||||
return setErrMsg(pCursor, pzErr);
|
||||
}
|
||||
return SQLITE_OK;
|
||||
}
|
||||
|
||||
int goVEof(void *pCursor);
|
||||
|
||||
static inline int cXEof(sqlite3_vtab_cursor *pCursor) {
|
||||
return goVEof(((goVTabCursor*)pCursor)->vTabCursor);
|
||||
}
|
||||
|
||||
char* goVColumn(void *pCursor, void *cp, int col);
|
||||
|
||||
static int cXColumn(sqlite3_vtab_cursor *pCursor, sqlite3_context *ctx, int i) {
|
||||
char *pzErr = goVColumn(((goVTabCursor*)pCursor)->vTabCursor, ctx, i);
|
||||
if (pzErr) {
|
||||
return setErrMsg(pCursor, pzErr);
|
||||
}
|
||||
return SQLITE_OK;
|
||||
}
|
||||
|
||||
char* goVRowid(void *pCursor, sqlite3_int64 *pRowid);
|
||||
|
||||
static int cXRowid(sqlite3_vtab_cursor *pCursor, sqlite3_int64 *pRowid) {
|
||||
char *pzErr = goVRowid(((goVTabCursor*)pCursor)->vTabCursor, pRowid);
|
||||
if (pzErr) {
|
||||
return setErrMsg(pCursor, pzErr);
|
||||
}
|
||||
return SQLITE_OK;
|
||||
}
|
||||
|
||||
char* goVUpdate(void *pVTab, int argc, sqlite3_value **argv, sqlite3_int64 *pRowid);
|
||||
|
||||
static int cXUpdate(sqlite3_vtab *pVTab, int argc, sqlite3_value **argv, sqlite3_int64 *pRowid) {
|
||||
char *pzErr = goVUpdate(((goVTab*)pVTab)->vTab, argc, argv, pRowid);
|
||||
if (pzErr) {
|
||||
if (pVTab->zErrMsg)
|
||||
sqlite3_free(pVTab->zErrMsg);
|
||||
pVTab->zErrMsg = pzErr;
|
||||
return SQLITE_ERROR;
|
||||
}
|
||||
return SQLITE_OK;
|
||||
}
|
||||
|
||||
static sqlite3_module goModule = {
|
||||
0, // iVersion
|
||||
cXCreate, // xCreate - create a table
|
||||
cXConnect, // xConnect - connect to an existing table
|
||||
cXBestIndex, // xBestIndex - Determine search strategy
|
||||
cXDisconnect, // xDisconnect - Disconnect from a table
|
||||
cXDestroy, // xDestroy - Drop a table
|
||||
cXOpen, // xOpen - open a cursor
|
||||
cXClose, // xClose - close a cursor
|
||||
cXFilter, // xFilter - configure scan constraints
|
||||
cXNext, // xNext - advance a cursor
|
||||
cXEof, // xEof
|
||||
cXColumn, // xColumn - read data
|
||||
cXRowid, // xRowid - read data
|
||||
cXUpdate, // xUpdate - write data
|
||||
// Not implemented
|
||||
0, // xBegin - begin transaction
|
||||
0, // xSync - sync transaction
|
||||
0, // xCommit - commit transaction
|
||||
0, // xRollback - rollback transaction
|
||||
0, // xFindFunction - function overloading
|
||||
0, // xRename - rename the table
|
||||
0, // xSavepoint
|
||||
0, // xRelease
|
||||
0 // xRollbackTo
|
||||
};
|
||||
|
||||
// See https://sqlite.org/vtab.html#eponymous_only_virtual_tables
|
||||
static sqlite3_module goModuleEponymousOnly = {
|
||||
0, // iVersion
|
||||
0, // xCreate - create a table, which here is null
|
||||
cXConnect, // xConnect - connect to an existing table
|
||||
cXBestIndex, // xBestIndex - Determine search strategy
|
||||
cXDisconnect, // xDisconnect - Disconnect from a table
|
||||
cXDestroy, // xDestroy - Drop a table
|
||||
cXOpen, // xOpen - open a cursor
|
||||
cXClose, // xClose - close a cursor
|
||||
cXFilter, // xFilter - configure scan constraints
|
||||
cXNext, // xNext - advance a cursor
|
||||
cXEof, // xEof
|
||||
cXColumn, // xColumn - read data
|
||||
cXRowid, // xRowid - read data
|
||||
cXUpdate, // xUpdate - write data
|
||||
// Not implemented
|
||||
0, // xBegin - begin transaction
|
||||
0, // xSync - sync transaction
|
||||
0, // xCommit - commit transaction
|
||||
0, // xRollback - rollback transaction
|
||||
0, // xFindFunction - function overloading
|
||||
0, // xRename - rename the table
|
||||
0, // xSavepoint
|
||||
0, // xRelease
|
||||
0 // xRollbackTo
|
||||
};
|
||||
|
||||
void goMDestroy(void*);
|
||||
|
||||
static int _sqlite3_create_module(sqlite3 *db, const char *zName, uintptr_t pClientData) {
|
||||
return sqlite3_create_module_v2(db, zName, &goModule, (void*) pClientData, goMDestroy);
|
||||
}
|
||||
|
||||
static int _sqlite3_create_module_eponymous_only(sqlite3 *db, const char *zName, uintptr_t pClientData) {
|
||||
return sqlite3_create_module_v2(db, zName, &goModuleEponymousOnly, (void*) pClientData, goMDestroy);
|
||||
}
|
||||
*/
|
||||
import "C"
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
type sqliteModule struct {
|
||||
c *SQLiteConn
|
||||
name string
|
||||
module Module
|
||||
}
|
||||
|
||||
type sqliteVTab struct {
|
||||
module *sqliteModule
|
||||
vTab VTab
|
||||
}
|
||||
|
||||
type sqliteVTabCursor struct {
|
||||
vTab *sqliteVTab
|
||||
vTabCursor VTabCursor
|
||||
}
|
||||
|
||||
// Op is the type of operations.
|
||||
type Op uint8
|
||||
|
||||
// Op values identify the operations.
|
||||
const (
|
||||
OpEQ Op = 2
|
||||
OpGT = 4
|
||||
OpLE = 8
|
||||
OpLT = 16
|
||||
OpGE = 32
|
||||
OpMATCH = 64
|
||||
OpLIKE = 65 /* 3.10.0 and later only */
|
||||
OpGLOB = 66 /* 3.10.0 and later only */
|
||||
OpREGEXP = 67 /* 3.10.0 and later only */
|
||||
OpScanUnique = 1 /* Scan visits at most 1 row */
|
||||
)
|
||||
|
||||
// InfoConstraint gives information about a constraint.
|
||||
type InfoConstraint struct {
|
||||
Column int
|
||||
Op Op
|
||||
Usable bool
|
||||
}
|
||||
|
||||
// InfoOrderBy gives information about an order-by term.
|
||||
type InfoOrderBy struct {
|
||||
Column int
|
||||
Desc bool
|
||||
}
|
||||
|
||||
func constraints(info *C.sqlite3_index_info) []InfoConstraint {
|
||||
slice := *(*[]C.struct_sqlite3_index_constraint)(unsafe.Pointer(&reflect.SliceHeader{
|
||||
Data: uintptr(unsafe.Pointer(info.aConstraint)),
|
||||
Len: int(info.nConstraint),
|
||||
Cap: int(info.nConstraint),
|
||||
}))
|
||||
|
||||
cst := make([]InfoConstraint, 0, len(slice))
|
||||
for _, c := range slice {
|
||||
var usable bool
|
||||
if c.usable > 0 {
|
||||
usable = true
|
||||
}
|
||||
cst = append(cst, InfoConstraint{
|
||||
Column: int(c.iColumn),
|
||||
Op: Op(c.op),
|
||||
Usable: usable,
|
||||
})
|
||||
}
|
||||
return cst
|
||||
}
|
||||
|
||||
func orderBys(info *C.sqlite3_index_info) []InfoOrderBy {
|
||||
slice := *(*[]C.struct_sqlite3_index_orderby)(unsafe.Pointer(&reflect.SliceHeader{
|
||||
Data: uintptr(unsafe.Pointer(info.aOrderBy)),
|
||||
Len: int(info.nOrderBy),
|
||||
Cap: int(info.nOrderBy),
|
||||
}))
|
||||
|
||||
ob := make([]InfoOrderBy, 0, len(slice))
|
||||
for _, c := range slice {
|
||||
var desc bool
|
||||
if c.desc > 0 {
|
||||
desc = true
|
||||
}
|
||||
ob = append(ob, InfoOrderBy{
|
||||
Column: int(c.iColumn),
|
||||
Desc: desc,
|
||||
})
|
||||
}
|
||||
return ob
|
||||
}
|
||||
|
||||
// IndexResult is a Go struct representation of what eventually ends up in the
|
||||
// output fields for `sqlite3_index_info`
|
||||
// See: https://www.sqlite.org/c3ref/index_info.html
|
||||
type IndexResult struct {
|
||||
Used []bool // aConstraintUsage
|
||||
IdxNum int
|
||||
IdxStr string
|
||||
AlreadyOrdered bool // orderByConsumed
|
||||
EstimatedCost float64
|
||||
EstimatedRows float64
|
||||
}
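// Illustrative fragment (an assumption of how it would sit in this package;
// it is not part of the driver): how an xBestIndex implementation typically
// fills the fields above. Constraints the table will handle are flagged in
// Used, and the chosen plan is passed on to xFilter through IdxNum/IdxStr.
func bestIndexSketch(csts []InfoConstraint) *IndexResult {
	used := make([]bool, len(csts))
	idxNum := 0
	for i, c := range csts {
		// Consume a single usable equality constraint on column 0, if any;
		// its value will then arrive as vals[0] in VTabCursor.Filter.
		if c.Usable && c.Column == 0 && c.Op == OpEQ {
			used[i] = true
			idxNum = 1
			break
		}
	}
	return &IndexResult{
		Used:          used,
		IdxNum:        idxNum,
		IdxStr:        "col0-eq",
		EstimatedCost: 10,
	}
}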
|
||||
|
||||
// mPrintf is a utility wrapper around sqlite3_mprintf
|
||||
func mPrintf(format, arg string) *C.char {
|
||||
cf := C.CString(format)
|
||||
defer C.free(unsafe.Pointer(cf))
|
||||
ca := C.CString(arg)
|
||||
defer C.free(unsafe.Pointer(ca))
|
||||
return C._sqlite3_mprintf(cf, ca)
|
||||
}
|
||||
|
||||
//export goMInit
|
||||
func goMInit(db, pClientData unsafe.Pointer, argc C.int, argv **C.char, pzErr **C.char, isCreate C.int) C.uintptr_t {
|
||||
m := lookupHandle(pClientData).(*sqliteModule)
|
||||
if m.c.db != (*C.sqlite3)(db) {
|
||||
*pzErr = mPrintf("%s", "Inconsistent db handles")
|
||||
return 0
|
||||
}
|
||||
args := make([]string, argc)
|
||||
var A []*C.char
|
||||
slice := reflect.SliceHeader{Data: uintptr(unsafe.Pointer(argv)), Len: int(argc), Cap: int(argc)}
|
||||
a := reflect.NewAt(reflect.TypeOf(A), unsafe.Pointer(&slice)).Elem().Interface()
|
||||
for i, s := range a.([]*C.char) {
|
||||
args[i] = C.GoString(s)
|
||||
}
|
||||
var vTab VTab
|
||||
var err error
|
||||
if isCreate == 1 {
|
||||
vTab, err = m.module.Create(m.c, args)
|
||||
} else {
|
||||
vTab, err = m.module.Connect(m.c, args)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
*pzErr = mPrintf("%s", err.Error())
|
||||
return 0
|
||||
}
|
||||
vt := sqliteVTab{m, vTab}
|
||||
*pzErr = nil
|
||||
return C.uintptr_t(uintptr(newHandle(m.c, &vt)))
|
||||
}
|
||||
|
||||
//export goVRelease
|
||||
func goVRelease(pVTab unsafe.Pointer, isDestroy C.int) *C.char {
|
||||
vt := lookupHandle(pVTab).(*sqliteVTab)
|
||||
var err error
|
||||
if isDestroy == 1 {
|
||||
err = vt.vTab.Destroy()
|
||||
} else {
|
||||
err = vt.vTab.Disconnect()
|
||||
}
|
||||
if err != nil {
|
||||
return mPrintf("%s", err.Error())
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
//export goVOpen
|
||||
func goVOpen(pVTab unsafe.Pointer, pzErr **C.char) C.uintptr_t {
|
||||
vt := lookupHandle(pVTab).(*sqliteVTab)
|
||||
vTabCursor, err := vt.vTab.Open()
|
||||
if err != nil {
|
||||
*pzErr = mPrintf("%s", err.Error())
|
||||
return 0
|
||||
}
|
||||
vtc := sqliteVTabCursor{vt, vTabCursor}
|
||||
*pzErr = nil
|
||||
return C.uintptr_t(uintptr(newHandle(vt.module.c, &vtc)))
|
||||
}
|
||||
|
||||
//export goVBestIndex
|
||||
func goVBestIndex(pVTab unsafe.Pointer, icp unsafe.Pointer) *C.char {
|
||||
vt := lookupHandle(pVTab).(*sqliteVTab)
|
||||
info := (*C.sqlite3_index_info)(icp)
|
||||
csts := constraints(info)
|
||||
res, err := vt.vTab.BestIndex(csts, orderBys(info))
|
||||
if err != nil {
|
||||
return mPrintf("%s", err.Error())
|
||||
}
|
||||
if len(res.Used) != len(csts) {
|
||||
return mPrintf("Result.Used != expected value", "")
|
||||
}
|
||||
|
||||
// Get a pointer to constraint_usage struct so we can update in place.
|
||||
|
||||
slice := *(*[]C.struct_sqlite3_index_constraint_usage)(unsafe.Pointer(&reflect.SliceHeader{
|
||||
Data: uintptr(unsafe.Pointer(info.aConstraintUsage)),
|
||||
Len: int(info.nConstraint),
|
||||
Cap: int(info.nConstraint),
|
||||
}))
|
||||
index := 1
|
||||
for i := range slice {
|
||||
if res.Used[i] {
|
||||
slice[i].argvIndex = C.int(index)
|
||||
slice[i].omit = C.uchar(1)
|
||||
index++
|
||||
}
|
||||
}
|
||||
|
||||
info.idxNum = C.int(res.IdxNum)
|
||||
info.idxStr = (*C.char)(C.sqlite3_malloc(C.int(len(res.IdxStr) + 1)))
|
||||
if info.idxStr == nil {
|
||||
// C.malloc and C.CString ordinarily do this for you. See https://golang.org/cmd/cgo/
|
||||
panic("out of memory")
|
||||
}
|
||||
info.needToFreeIdxStr = C.int(1)
|
||||
|
||||
idxStr := *(*[]byte)(unsafe.Pointer(&reflect.SliceHeader{
|
||||
Data: uintptr(unsafe.Pointer(info.idxStr)),
|
||||
Len: len(res.IdxStr) + 1,
|
||||
Cap: len(res.IdxStr) + 1,
|
||||
}))
|
||||
copy(idxStr, res.IdxStr)
|
||||
idxStr[len(idxStr)-1] = 0 // null-terminated string
|
||||
|
||||
if res.AlreadyOrdered {
|
||||
info.orderByConsumed = C.int(1)
|
||||
}
|
||||
info.estimatedCost = C.double(res.EstimatedCost)
|
||||
info.estimatedRows = C.sqlite3_int64(res.EstimatedRows)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
//export goVClose
|
||||
func goVClose(pCursor unsafe.Pointer) *C.char {
|
||||
vtc := lookupHandle(pCursor).(*sqliteVTabCursor)
|
||||
err := vtc.vTabCursor.Close()
|
||||
if err != nil {
|
||||
return mPrintf("%s", err.Error())
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
//export goMDestroy
|
||||
func goMDestroy(pClientData unsafe.Pointer) {
|
||||
m := lookupHandle(pClientData).(*sqliteModule)
|
||||
m.module.DestroyModule()
|
||||
}
|
||||
|
||||
//export goVFilter
|
||||
func goVFilter(pCursor unsafe.Pointer, idxNum C.int, idxName *C.char, argc C.int, argv **C.sqlite3_value) *C.char {
|
||||
vtc := lookupHandle(pCursor).(*sqliteVTabCursor)
|
||||
args := (*[(math.MaxInt32 - 1) / unsafe.Sizeof((*C.sqlite3_value)(nil))]*C.sqlite3_value)(unsafe.Pointer(argv))[:argc:argc]
|
||||
vals := make([]interface{}, 0, argc)
|
||||
for _, v := range args {
|
||||
conv, err := callbackArgGeneric(v)
|
||||
if err != nil {
|
||||
return mPrintf("%s", err.Error())
|
||||
}
|
||||
vals = append(vals, conv.Interface())
|
||||
}
|
||||
err := vtc.vTabCursor.Filter(int(idxNum), C.GoString(idxName), vals)
|
||||
if err != nil {
|
||||
return mPrintf("%s", err.Error())
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
//export goVNext
|
||||
func goVNext(pCursor unsafe.Pointer) *C.char {
|
||||
vtc := lookupHandle(pCursor).(*sqliteVTabCursor)
|
||||
err := vtc.vTabCursor.Next()
|
||||
if err != nil {
|
||||
return mPrintf("%s", err.Error())
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
//export goVEof
|
||||
func goVEof(pCursor unsafe.Pointer) C.int {
|
||||
vtc := lookupHandle(pCursor).(*sqliteVTabCursor)
|
||||
err := vtc.vTabCursor.EOF()
|
||||
if err {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
//export goVColumn
|
||||
func goVColumn(pCursor, cp unsafe.Pointer, col C.int) *C.char {
|
||||
vtc := lookupHandle(pCursor).(*sqliteVTabCursor)
|
||||
c := (*SQLiteContext)(cp)
|
||||
err := vtc.vTabCursor.Column(c, int(col))
|
||||
if err != nil {
|
||||
return mPrintf("%s", err.Error())
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
//export goVRowid
|
||||
func goVRowid(pCursor unsafe.Pointer, pRowid *C.sqlite3_int64) *C.char {
|
||||
vtc := lookupHandle(pCursor).(*sqliteVTabCursor)
|
||||
rowid, err := vtc.vTabCursor.Rowid()
|
||||
if err != nil {
|
||||
return mPrintf("%s", err.Error())
|
||||
}
|
||||
*pRowid = C.sqlite3_int64(rowid)
|
||||
return nil
|
||||
}
|
||||
|
||||
//export goVUpdate
|
||||
func goVUpdate(pVTab unsafe.Pointer, argc C.int, argv **C.sqlite3_value, pRowid *C.sqlite3_int64) *C.char {
|
||||
vt := lookupHandle(pVTab).(*sqliteVTab)
|
||||
|
||||
var tname string
|
||||
if n, ok := vt.vTab.(interface {
|
||||
TableName() string
|
||||
}); ok {
|
||||
tname = n.TableName() + " "
|
||||
}
|
||||
|
||||
err := fmt.Errorf("virtual %s table %sis read-only", vt.module.name, tname)
|
||||
if v, ok := vt.vTab.(VTabUpdater); ok {
|
||||
// convert argv
|
||||
args := (*[(math.MaxInt32 - 1) / unsafe.Sizeof((*C.sqlite3_value)(nil))]*C.sqlite3_value)(unsafe.Pointer(argv))[:argc:argc]
|
||||
vals := make([]interface{}, 0, argc)
|
||||
for _, v := range args {
|
||||
conv, err := callbackArgGeneric(v)
|
||||
if err != nil {
|
||||
return mPrintf("%s", err.Error())
|
||||
}
|
||||
|
||||
// work around for SQLITE_NULL
|
||||
x := conv.Interface()
|
||||
if z, ok := x.([]byte); ok && z == nil {
|
||||
x = nil
|
||||
}
|
||||
|
||||
vals = append(vals, x)
|
||||
}
|
||||
|
||||
switch {
|
||||
case argc == 1:
|
||||
err = v.Delete(vals[0])
|
||||
|
||||
case argc > 1 && vals[0] == nil:
|
||||
var id int64
|
||||
id, err = v.Insert(vals[1], vals[2:])
|
||||
if err == nil {
|
||||
*pRowid = C.sqlite3_int64(id)
|
||||
}
|
||||
|
||||
case argc > 1:
|
||||
err = v.Update(vals[1], vals[2:])
|
||||
}
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return mPrintf("%s", err.Error())
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Module is a "virtual table module", it defines the implementation of a
|
||||
// virtual tables. See: http://sqlite.org/c3ref/module.html
|
||||
type Module interface {
|
||||
// http://sqlite.org/vtab.html#xcreate
|
||||
Create(c *SQLiteConn, args []string) (VTab, error)
|
||||
// http://sqlite.org/vtab.html#xconnect
|
||||
Connect(c *SQLiteConn, args []string) (VTab, error)
|
||||
// http://sqlite.org/c3ref/create_module.html
|
||||
DestroyModule()
|
||||
}
|
||||
|
||||
// EponymousOnlyModule is a "virtual table module" (as above), but
|
||||
// for defining "eponymous only" virtual tables See: https://sqlite.org/vtab.html#eponymous_only_virtual_tables
|
||||
type EponymousOnlyModule interface {
|
||||
Module
|
||||
EponymousOnlyModule()
|
||||
}
|
||||
|
||||
// VTab describes a particular instance of the virtual table.
|
||||
// See: http://sqlite.org/c3ref/vtab.html
|
||||
type VTab interface {
|
||||
// http://sqlite.org/vtab.html#xbestindex
|
||||
BestIndex([]InfoConstraint, []InfoOrderBy) (*IndexResult, error)
|
||||
// http://sqlite.org/vtab.html#xdisconnect
|
||||
Disconnect() error
|
||||
// http://sqlite.org/vtab.html#sqlite3_module.xDestroy
|
||||
Destroy() error
|
||||
// http://sqlite.org/vtab.html#xopen
|
||||
Open() (VTabCursor, error)
|
||||
}
|
||||
|
||||
// VTabUpdater is a type that allows a VTab to be inserted, updated, or
|
||||
// deleted.
|
||||
// See: https://sqlite.org/vtab.html#xupdate
|
||||
type VTabUpdater interface {
|
||||
Delete(interface{}) error
|
||||
Insert(interface{}, []interface{}) (int64, error)
|
||||
Update(interface{}, []interface{}) error
|
||||
}
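// Illustrative fragment (an assumption of how it would sit in this package;
// it is not part of the driver): a minimal in-memory table satisfying
// VTabUpdater. Rowid handling and value decoding are reduced to the bare
// minimum; a real table would also implement VTab.
type memTable struct {
	nextID int64
	rows   map[int64][]interface{}
}

var _ VTabUpdater = (*memTable)(nil) // compile-time interface check

func newMemTable() *memTable {
	return &memTable{rows: make(map[int64][]interface{})}
}

func (t *memTable) Delete(rowid interface{}) error {
	delete(t.rows, rowid.(int64))
	return nil
}

func (t *memTable) Insert(rowid interface{}, vals []interface{}) (int64, error) {
	id := t.nextID
	if rowid != nil {
		id = rowid.(int64) // caller-chosen rowid
	}
	t.nextID = id + 1
	t.rows[id] = vals
	return id, nil
}

func (t *memTable) Update(rowid interface{}, vals []interface{}) error {
	t.rows[rowid.(int64)] = vals
	return nil
}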
|
||||
|
||||
// VTabCursor describes cursors that point into the virtual table and are used
|
||||
// to loop through the virtual table. See: http://sqlite.org/c3ref/vtab_cursor.html
|
||||
type VTabCursor interface {
|
||||
// http://sqlite.org/vtab.html#xclose
|
||||
Close() error
|
||||
// http://sqlite.org/vtab.html#xfilter
|
||||
Filter(idxNum int, idxStr string, vals []interface{}) error
|
||||
// http://sqlite.org/vtab.html#xnext
|
||||
Next() error
|
||||
// http://sqlite.org/vtab.html#xeof
|
||||
EOF() bool
|
||||
// http://sqlite.org/vtab.html#xcolumn
|
||||
Column(c *SQLiteContext, col int) error
|
||||
// http://sqlite.org/vtab.html#xrowid
|
||||
Rowid() (int64, error)
|
||||
}
|
||||
|
||||
// DeclareVTab declares the Schema of a virtual table.
|
||||
// See: http://sqlite.org/c3ref/declare_vtab.html
|
||||
func (c *SQLiteConn) DeclareVTab(sql string) error {
|
||||
zSQL := C.CString(sql)
|
||||
defer C.free(unsafe.Pointer(zSQL))
|
||||
rv := C.sqlite3_declare_vtab(c.db, zSQL)
|
||||
if rv != C.SQLITE_OK {
|
||||
return c.lastError()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// CreateModule registers a virtual table implementation.
|
||||
// See: http://sqlite.org/c3ref/create_module.html
|
||||
func (c *SQLiteConn) CreateModule(moduleName string, module Module) error {
|
||||
mname := C.CString(moduleName)
|
||||
defer C.free(unsafe.Pointer(mname))
|
||||
udm := sqliteModule{c, moduleName, module}
|
||||
switch module.(type) {
|
||||
case EponymousOnlyModule:
|
||||
rv := C._sqlite3_create_module_eponymous_only(c.db, mname, C.uintptr_t(uintptr(newHandle(c, &udm))))
|
||||
if rv != C.SQLITE_OK {
|
||||
return c.lastError()
|
||||
}
|
||||
return nil
|
||||
case Module:
|
||||
rv := C._sqlite3_create_module(c.db, mname, C.uintptr_t(uintptr(newHandle(c, &udm))))
|
||||
if rv != C.SQLITE_OK {
|
||||
return c.lastError()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return nil
|
||||
}
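// Usage sketch (standalone example program, not part of this file): a minimal
// read-only virtual table exposing the integers 0..9, tying together Module,
// VTab, VTabCursor, DeclareVTab and CreateModule from above. Assumptions: the
// upstream import path github.com/mattn/go-sqlite3, a build with
// -tags sqlite_vtable, and that SQLiteContext provides ResultInt64 as in the
// upstream driver.
package main

import (
	"database/sql"
	"fmt"
	"log"

	sqlite3 "github.com/mattn/go-sqlite3"
)

type intsModule struct{}

func (m *intsModule) Create(c *sqlite3.SQLiteConn, args []string) (sqlite3.VTab, error) {
	// Tell SQLite what the virtual table looks like.
	if err := c.DeclareVTab("CREATE TABLE x (value INTEGER)"); err != nil {
		return nil, err
	}
	return &intsTable{}, nil
}

func (m *intsModule) Connect(c *sqlite3.SQLiteConn, args []string) (sqlite3.VTab, error) {
	return m.Create(c, args)
}

func (m *intsModule) DestroyModule() {}

type intsTable struct{}

func (t *intsTable) BestIndex(csts []sqlite3.InfoConstraint, ob []sqlite3.InfoOrderBy) (*sqlite3.IndexResult, error) {
	// Consume no constraints; SQLite filters the rows itself.
	return &sqlite3.IndexResult{Used: make([]bool, len(csts))}, nil
}

func (t *intsTable) Open() (sqlite3.VTabCursor, error) { return &intsCursor{}, nil }
func (t *intsTable) Disconnect() error                 { return nil }
func (t *intsTable) Destroy() error                    { return nil }

type intsCursor struct{ row int64 }

func (c *intsCursor) Filter(idxNum int, idxStr string, vals []interface{}) error {
	c.row = 0
	return nil
}

func (c *intsCursor) Column(ctx *sqlite3.SQLiteContext, col int) error {
	ctx.ResultInt64(c.row)
	return nil
}

func (c *intsCursor) Next() error           { c.row++; return nil }
func (c *intsCursor) EOF() bool             { return c.row >= 10 }
func (c *intsCursor) Rowid() (int64, error) { return c.row, nil }
func (c *intsCursor) Close() error          { return nil }

func main() {
	sql.Register("sqlite3_with_ints", &sqlite3.SQLiteDriver{
		ConnectHook: func(conn *sqlite3.SQLiteConn) error {
			return conn.CreateModule("ints", &intsModule{})
		},
	})

	db, err := sql.Open("sqlite3_with_ints", ":memory:")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	if _, err := db.Exec("CREATE VIRTUAL TABLE seq USING ints"); err != nil {
		log.Fatal(err)
	}

	var total int64
	if err := db.QueryRow("SELECT sum(value) FROM seq").Scan(&total); err != nil {
		log.Fatal(err)
	}
	fmt.Println("sum 0..9 =", total) // prints 45
}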
|
|
@ -0,0 +1,17 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build !windows
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -I.
|
||||
#cgo linux LDFLAGS: -ldl
|
||||
#cgo linux,ppc LDFLAGS: -lpthread
|
||||
#cgo linux,ppc64 LDFLAGS: -lpthread
|
||||
#cgo linux,ppc64le LDFLAGS: -lpthread
|
||||
*/
|
||||
import "C"
|
|
@ -0,0 +1,14 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build solaris
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -D__EXTENSIONS__=1
|
||||
#cgo LDFLAGS: -lc
|
||||
*/
|
||||
import "C"
|
|
@ -0,0 +1,287 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build sqlite_trace trace
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#ifndef USE_LIBSQLITE3
|
||||
#include "sqlite3-binding.h"
|
||||
#else
|
||||
#include <sqlite3.h>
|
||||
#endif
|
||||
#include <stdlib.h>
|
||||
|
||||
int traceCallbackTrampoline(unsigned int traceEventCode, void *ctx, void *p, void *x);
|
||||
*/
|
||||
import "C"
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"sync"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// Trace... constants identify the possible events causing callback invocation.
|
||||
// Values are the same as the corresponding SQLite Trace Event Codes.
|
||||
const (
|
||||
TraceStmt = uint32(C.SQLITE_TRACE_STMT)
|
||||
TraceProfile = uint32(C.SQLITE_TRACE_PROFILE)
|
||||
TraceRow = uint32(C.SQLITE_TRACE_ROW)
|
||||
TraceClose = uint32(C.SQLITE_TRACE_CLOSE)
|
||||
)
|
||||
|
||||
type TraceInfo struct {
|
||||
// Pack together the shorter fields, to keep the struct smaller.
|
||||
// On a 64-bit machine there would be padding
|
||||
// between EventCode and ConnHandle; having AutoCommit here is "free":
|
||||
EventCode uint32
|
||||
AutoCommit bool
|
||||
ConnHandle uintptr
|
||||
|
||||
// Usually filled, unless EventCode = TraceClose = SQLITE_TRACE_CLOSE:
|
||||
// identifier for a prepared statement:
|
||||
StmtHandle uintptr
|
||||
|
||||
// Two strings filled when EventCode = TraceStmt = SQLITE_TRACE_STMT:
|
||||
// (1) either the unexpanded SQL text of the prepared statement, or
|
||||
// an SQL comment that indicates the invocation of a trigger;
|
||||
// (2) expanded SQL, if requested and if (1) is not an SQL comment.
|
||||
StmtOrTrigger string
|
||||
ExpandedSQL string // only if requested (TraceConfig.WantExpandedSQL = true)
|
||||
|
||||
// filled when EventCode = TraceProfile = SQLITE_TRACE_PROFILE:
|
||||
// estimated number of nanoseconds that the prepared statement took to run:
|
||||
RunTimeNanosec int64
|
||||
|
||||
DBError Error
|
||||
}
|
||||
|
||||
// TraceUserCallback gives the signature for a trace function
|
||||
// provided by the user (Go application programmer).
|
||||
// SQLite 3.14 documentation (as of September 2, 2016)
|
||||
// for SQL Trace Hook = sqlite3_trace_v2():
|
||||
// The integer return value from the callback is currently ignored,
|
||||
// though this may change in future releases. Callback implementations
|
||||
// should return zero to ensure future compatibility.
|
||||
type TraceUserCallback func(TraceInfo) int
|
||||
|
||||
type TraceConfig struct {
|
||||
Callback TraceUserCallback
|
||||
EventMask uint32
|
||||
WantExpandedSQL bool
|
||||
}
|
||||
|
||||
func fillDBError(dbErr *Error, db *C.sqlite3) {
|
||||
// See SQLiteConn.lastError(), in file 'sqlite3.go' at the time of writing (Sept 5, 2016)
|
||||
dbErr.Code = ErrNo(C.sqlite3_errcode(db))
|
||||
dbErr.ExtendedCode = ErrNoExtended(C.sqlite3_extended_errcode(db))
|
||||
dbErr.err = C.GoString(C.sqlite3_errmsg(db))
|
||||
}
|
||||
|
||||
func fillExpandedSQL(info *TraceInfo, db *C.sqlite3, pStmt unsafe.Pointer) {
|
||||
if pStmt == nil {
|
||||
panic("No SQLite statement pointer in P arg of trace_v2 callback")
|
||||
}
|
||||
|
||||
expSQLiteCStr := C.sqlite3_expanded_sql((*C.sqlite3_stmt)(pStmt))
|
||||
defer C.sqlite3_free(unsafe.Pointer(expSQLiteCStr))
|
||||
if expSQLiteCStr == nil {
|
||||
fillDBError(&info.DBError, db)
|
||||
return
|
||||
}
|
||||
info.ExpandedSQL = C.GoString(expSQLiteCStr)
|
||||
}
|
||||
|
||||
//export traceCallbackTrampoline
|
||||
func traceCallbackTrampoline(
|
||||
traceEventCode C.uint,
|
||||
// Parameter named 'C' in SQLite docs = Context given at registration:
|
||||
ctx unsafe.Pointer,
|
||||
// Parameter named 'P' in SQLite docs (Primary event data?):
|
||||
p unsafe.Pointer,
|
||||
// Parameter named 'X' in SQLite docs (eXtra event data?):
|
||||
xValue unsafe.Pointer) C.int {
|
||||
|
||||
eventCode := uint32(traceEventCode)
|
||||
|
||||
if ctx == nil {
|
||||
panic(fmt.Sprintf("No context (ev 0x%x)", traceEventCode))
|
||||
}
|
||||
|
||||
contextDB := (*C.sqlite3)(ctx)
|
||||
connHandle := uintptr(ctx)
|
||||
|
||||
var traceConf TraceConfig
|
||||
var found bool
|
||||
if eventCode == TraceClose {
|
||||
// clean up traceMap: 'pop' means get and delete
|
||||
traceConf, found = popTraceMapping(connHandle)
|
||||
} else {
|
||||
traceConf, found = lookupTraceMapping(connHandle)
|
||||
}
|
||||
|
||||
if !found {
|
||||
panic(fmt.Sprintf("Mapping not found for handle 0x%x (ev 0x%x)",
|
||||
connHandle, eventCode))
|
||||
}
|
||||
|
||||
var info TraceInfo
|
||||
|
||||
info.EventCode = eventCode
|
||||
info.AutoCommit = (int(C.sqlite3_get_autocommit(contextDB)) != 0)
|
||||
info.ConnHandle = connHandle
|
||||
|
||||
switch eventCode {
|
||||
case TraceStmt:
|
||||
info.StmtHandle = uintptr(p)
|
||||
|
||||
var xStr string
|
||||
if xValue != nil {
|
||||
xStr = C.GoString((*C.char)(xValue))
|
||||
}
|
||||
info.StmtOrTrigger = xStr
|
||||
if !strings.HasPrefix(xStr, "--") {
|
||||
// Not SQL comment, therefore the current event
|
||||
// is not related to a trigger.
|
||||
// The user might want to receive the expanded SQL;
|
||||
// let's check:
|
||||
if traceConf.WantExpandedSQL {
|
||||
fillExpandedSQL(&info, contextDB, p)
|
||||
}
|
||||
}
|
||||
|
||||
case TraceProfile:
|
||||
info.StmtHandle = uintptr(p)
|
||||
|
||||
if xValue == nil {
|
||||
panic("NULL pointer in X arg of trace_v2 callback for SQLITE_TRACE_PROFILE event")
|
||||
}
|
||||
|
||||
info.RunTimeNanosec = *(*int64)(xValue)
|
||||
|
||||
// sample the error //TODO: is it safe? is it useful?
|
||||
fillDBError(&info.DBError, contextDB)
|
||||
|
||||
case TraceRow:
|
||||
info.StmtHandle = uintptr(p)
|
||||
|
||||
case TraceClose:
|
||||
handle := uintptr(p)
|
||||
if handle != info.ConnHandle {
|
||||
panic(fmt.Sprintf("Different conn handle 0x%x (expected 0x%x) in SQLITE_TRACE_CLOSE event.",
|
||||
handle, info.ConnHandle))
|
||||
}
|
||||
|
||||
default:
|
||||
// Pass unsupported events to the user callback (if configured);
|
||||
// let the user callback decide whether to panic or ignore them.
|
||||
}
|
||||
|
||||
// Do not execute user callback when the event was not requested by user!
|
||||
// Remember that the Close event is always selected when
|
||||
// registering this callback trampoline with SQLite --- for cleanup.
|
||||
// In the future there may be more events forced to "selected" in SQLite
|
||||
// for the driver's needs.
|
||||
if traceConf.EventMask&eventCode == 0 {
|
||||
return 0
|
||||
}
|
||||
|
||||
r := 0
|
||||
if traceConf.Callback != nil {
|
||||
r = traceConf.Callback(info)
|
||||
}
|
||||
return C.int(r)
|
||||
}
|
||||
|
||||
type traceMapEntry struct {
|
||||
config TraceConfig
|
||||
}
|
||||
|
||||
var traceMapLock sync.Mutex
|
||||
var traceMap = make(map[uintptr]traceMapEntry)
|
||||
|
||||
func addTraceMapping(connHandle uintptr, traceConf TraceConfig) {
|
||||
traceMapLock.Lock()
|
||||
defer traceMapLock.Unlock()
|
||||
|
||||
oldEntryCopy, found := traceMap[connHandle]
|
||||
if found {
|
||||
panic(fmt.Sprintf("Adding trace config %v: handle 0x%x already registered (%v).",
|
||||
traceConf, connHandle, oldEntryCopy.config))
|
||||
}
|
||||
traceMap[connHandle] = traceMapEntry{config: traceConf}
|
||||
}
|
||||
|
||||
func lookupTraceMapping(connHandle uintptr) (TraceConfig, bool) {
|
||||
traceMapLock.Lock()
|
||||
defer traceMapLock.Unlock()
|
||||
|
||||
entryCopy, found := traceMap[connHandle]
|
||||
return entryCopy.config, found
|
||||
}
|
||||
|
||||
// 'pop' = get and delete from map before returning the value to the caller
|
||||
func popTraceMapping(connHandle uintptr) (TraceConfig, bool) {
|
||||
traceMapLock.Lock()
|
||||
defer traceMapLock.Unlock()
|
||||
|
||||
entryCopy, found := traceMap[connHandle]
|
||||
if found {
|
||||
delete(traceMap, connHandle)
|
||||
}
|
||||
return entryCopy.config, found
|
||||
}
|
||||
|
||||
// SetTrace installs or removes the trace callback for the given database connection.
|
||||
// It's not named 'RegisterTrace' because only one callback can be kept and called.
|
||||
// Calling SetTrace a second time on the same database connection
|
||||
// overrides (cancels) any prior callback and all its settings:
|
||||
// event mask, etc.
|
||||
func (c *SQLiteConn) SetTrace(requested *TraceConfig) error {
|
||||
connHandle := uintptr(unsafe.Pointer(c.db))
|
||||
|
||||
_, _ = popTraceMapping(connHandle)
|
||||
|
||||
if requested == nil {
|
||||
// The traceMap entry was deleted already by popTraceMapping():
|
||||
// can disable all events now, no need to watch for TraceClose.
|
||||
err := c.setSQLiteTrace(0)
|
||||
return err
|
||||
}
|
||||
|
||||
reqCopy := *requested
|
||||
|
||||
// Disable potentially expensive operations
|
||||
// if their result will not be used. We are doing this
|
||||
// just in case the caller provided nonsensical input.
|
||||
if reqCopy.EventMask&TraceStmt == 0 {
|
||||
reqCopy.WantExpandedSQL = false
|
||||
}
|
||||
|
||||
addTraceMapping(connHandle, reqCopy)
|
||||
|
||||
// The callback trampoline function does cleanup on Close event,
|
||||
// regardless of the presence or absence of the user callback.
|
||||
// Therefore it needs the Close event to be selected:
|
||||
actualEventMask := uint(reqCopy.EventMask | TraceClose)
|
||||
err := c.setSQLiteTrace(actualEventMask)
|
||||
return err
|
||||
}
|
||||
|
||||
func (c *SQLiteConn) setSQLiteTrace(sqliteEventMask uint) error {
|
||||
rv := C.sqlite3_trace_v2(c.db,
|
||||
C.uint(sqliteEventMask),
|
||||
(*[0]byte)(unsafe.Pointer(C.traceCallbackTrampoline)),
|
||||
unsafe.Pointer(c.db)) // Fourth arg is same as first: we are
|
||||
// passing the database connection handle as callback context.
|
||||
|
||||
if rv != C.SQLITE_OK {
|
||||
return c.lastError()
|
||||
}
|
||||
return nil
|
||||
}
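// Usage sketch (standalone example program, not part of this file): installing
// a trace callback that logs every executed statement with its expanded SQL
// and run time. Assumptions: the upstream import path
// github.com/mattn/go-sqlite3 and a build with -tags sqlite_trace.
package main

import (
	"database/sql"
	"log"

	sqlite3 "github.com/mattn/go-sqlite3"
)

func main() {
	sql.Register("sqlite3_traced", &sqlite3.SQLiteDriver{
		ConnectHook: func(conn *sqlite3.SQLiteConn) error {
			return conn.SetTrace(&sqlite3.TraceConfig{
				EventMask:       sqlite3.TraceStmt | sqlite3.TraceProfile,
				WantExpandedSQL: true,
				Callback: func(info sqlite3.TraceInfo) int {
					log.Printf("trace ev=%#x sql=%q ns=%d",
						info.EventCode, info.ExpandedSQL, info.RunTimeNanosec)
					return 0 // per the comment above, always return zero
				},
			})
		},
	})

	db, err := sql.Open("sqlite3_traced", ":memory:")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	if _, err := db.Exec("CREATE TABLE t (x INTEGER)"); err != nil {
		log.Fatal(err)
	}
}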
|
|
@ -0,0 +1,108 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#ifndef USE_LIBSQLITE3
|
||||
#include "sqlite3-binding.h"
|
||||
#else
|
||||
#include <sqlite3.h>
|
||||
#endif
|
||||
*/
|
||||
import "C"
|
||||
import (
|
||||
"database/sql"
|
||||
"reflect"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// ColumnTypeDatabaseTypeName implements RowsColumnTypeDatabaseTypeName.
|
||||
func (rc *SQLiteRows) ColumnTypeDatabaseTypeName(i int) string {
|
||||
return C.GoString(C.sqlite3_column_decltype(rc.s.s, C.int(i)))
|
||||
}
|
||||
|
||||
/*
|
||||
func (rc *SQLiteRows) ColumnTypeLength(index int) (length int64, ok bool) {
|
||||
return 0, false
|
||||
}
|
||||
|
||||
func (rc *SQLiteRows) ColumnTypePrecisionScale(index int) (precision, scale int64, ok bool) {
|
||||
return 0, 0, false
|
||||
}
|
||||
*/
|
||||
|
||||
// ColumnTypeNullable implements RowsColumnTypeNullable.
|
||||
func (rc *SQLiteRows) ColumnTypeNullable(i int) (nullable, ok bool) {
|
||||
return true, true
|
||||
}
|
||||
|
||||
// ColumnTypeScanType implements RowsColumnTypeScanType.
|
||||
func (rc *SQLiteRows) ColumnTypeScanType(i int) reflect.Type {
|
||||
//ct := C.sqlite3_column_type(rc.s.s, C.int(i)) // Always returns 5
|
||||
return scanType(C.GoString(C.sqlite3_column_decltype(rc.s.s, C.int(i))))
|
||||
}
|
||||
|
||||
const (
|
||||
SQLITE_INTEGER = iota
|
||||
SQLITE_TEXT
|
||||
SQLITE_BLOB
|
||||
SQLITE_REAL
|
||||
SQLITE_NUMERIC
|
||||
SQLITE_TIME
|
||||
SQLITE_BOOL
|
||||
SQLITE_NULL
|
||||
)
|
||||
|
||||
func scanType(cdt string) reflect.Type {
|
||||
t := strings.ToUpper(cdt)
|
||||
i := databaseTypeConvSqlite(t)
|
||||
switch i {
|
||||
case SQLITE_INTEGER:
|
||||
return reflect.TypeOf(sql.NullInt64{})
|
||||
case SQLITE_TEXT:
|
||||
return reflect.TypeOf(sql.NullString{})
|
||||
case SQLITE_BLOB:
|
||||
return reflect.TypeOf(sql.RawBytes{})
|
||||
case SQLITE_REAL:
|
||||
return reflect.TypeOf(sql.NullFloat64{})
|
||||
case SQLITE_NUMERIC:
|
||||
return reflect.TypeOf(sql.NullFloat64{})
|
||||
case SQLITE_BOOL:
|
||||
return reflect.TypeOf(sql.NullBool{})
|
||||
case SQLITE_TIME:
|
||||
return reflect.TypeOf(sql.NullTime{})
|
||||
}
|
||||
return reflect.TypeOf(new(interface{}))
|
||||
}
|
||||
|
||||
func databaseTypeConvSqlite(t string) int {
|
||||
if strings.Contains(t, "INT") {
|
||||
return SQLITE_INTEGER
|
||||
}
|
||||
if t == "CLOB" || t == "TEXT" ||
|
||||
strings.Contains(t, "CHAR") {
|
||||
return SQLITE_TEXT
|
||||
}
|
||||
if t == "BLOB" {
|
||||
return SQLITE_BLOB
|
||||
}
|
||||
if t == "REAL" || t == "FLOAT" ||
|
||||
strings.Contains(t, "DOUBLE") {
|
||||
return SQLITE_REAL
|
||||
}
|
||||
if t == "DATE" || t == "DATETIME" ||
|
||||
t == "TIMESTAMP" {
|
||||
return SQLITE_TIME
|
||||
}
|
||||
if t == "NUMERIC" ||
|
||||
strings.Contains(t, "DECIMAL") {
|
||||
return SQLITE_NUMERIC
|
||||
}
|
||||
if t == "BOOLEAN" {
|
||||
return SQLITE_BOOL
|
||||
}
|
||||
|
||||
return SQLITE_NULL
|
||||
}
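// Usage sketch (standalone example program, not part of this file): how the
// methods above surface through database/sql. DatabaseTypeName reports the
// declared column type and ScanType the Go type derived by scanType above.
// Assumes the upstream import path github.com/mattn/go-sqlite3.
package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/mattn/go-sqlite3"
)

func main() {
	db, err := sql.Open("sqlite3", ":memory:")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	if _, err := db.Exec("CREATE TABLE t (id INTEGER, name TEXT, created DATETIME)"); err != nil {
		log.Fatal(err)
	}

	rows, err := db.Query("SELECT id, name, created FROM t")
	if err != nil {
		log.Fatal(err)
	}
	defer rows.Close()

	cols, err := rows.ColumnTypes()
	if err != nil {
		log.Fatal(err)
	}
	for _, c := range cols {
		// e.g. "id INTEGER sql.NullInt64", "created DATETIME sql.NullTime"
		fmt.Println(c.Name(), c.DatabaseTypeName(), c.ScanType())
	}
}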
|
|
@ -0,0 +1,41 @@
|
|||
// Copyright (C) 2018 G.J.R. Timmer <gjr.timmer@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build cgo
|
||||
|
||||
package sqlite3
|
||||
|
||||
// usleep is a function available on *nix based systems.
// This function is not present on Windows.
// Windows has a sleep function, but it works with seconds
// and not with microseconds as usleep does.
//
// This code should improve performance on Windows because
// without the presence of usleep SQLite waits 1 second.
|
||||
//
|
||||
// Source: https://github.com/php/php-src/blob/PHP-5.0/win32/time.c
|
||||
// License: https://github.com/php/php-src/blob/PHP-5.0/LICENSE
|
||||
// Details: https://stackoverflow.com/questions/5801813/c-usleep-is-obsolete-workarounds-for-windows-mingw?utm_medium=organic&utm_source=google_rich_qa&utm_campaign=google_rich_qa
|
||||
|
||||
/*
|
||||
#include <windows.h>
|
||||
|
||||
void usleep(__int64 usec)
|
||||
{
|
||||
HANDLE timer;
|
||||
LARGE_INTEGER ft;
|
||||
|
||||
// Convert to 100 nanosecond interval, negative value indicates relative time
|
||||
ft.QuadPart = -(10*usec);
|
||||
|
||||
timer = CreateWaitableTimer(NULL, TRUE, NULL);
|
||||
SetWaitableTimer(timer, &ft, 0, NULL, NULL, 0);
|
||||
WaitForSingleObject(timer, INFINITE);
|
||||
CloseHandle(timer);
|
||||
}
|
||||
*/
|
||||
import "C"
|
||||
|
||||
// EOF
|
|
@ -0,0 +1,18 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build windows
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -I.
|
||||
#cgo CFLAGS: -fno-stack-check
|
||||
#cgo CFLAGS: -fno-stack-protector
|
||||
#cgo CFLAGS: -mno-stack-arg-probe
|
||||
#cgo LDFLAGS: -lmingwex -lmingw32
|
||||
#cgo windows,386 CFLAGS: -D_USE_32BIT_TIME_T
|
||||
*/
|
||||
import "C"
|
|
@ -0,0 +1,698 @@
|
|||
#ifndef USE_LIBSQLITE3
|
||||
/*
|
||||
** 2006 June 7
|
||||
**
|
||||
** The author disclaims copyright to this source code. In place of
|
||||
** a legal notice, here is a blessing:
|
||||
**
|
||||
** May you do good and not evil.
|
||||
** May you find forgiveness for yourself and forgive others.
|
||||
** May you share freely, never taking more than you give.
|
||||
**
|
||||
*************************************************************************
|
||||
** This header file defines the SQLite interface for use by
|
||||
** shared libraries that want to be imported as extensions into
|
||||
** an SQLite instance. Shared libraries that intend to be loaded
|
||||
** as extensions by SQLite should #include this file instead of
|
||||
** sqlite3.h.
|
||||
*/
|
||||
#ifndef SQLITE3EXT_H
|
||||
#define SQLITE3EXT_H
|
||||
#include "sqlite3-binding.h"
|
||||
#ifdef __clang__
|
||||
#define assert(condition) ((void)0)
|
||||
#endif
|
||||
|
||||
|
||||
/*
|
||||
** The following structure holds pointers to all of the SQLite API
|
||||
** routines.
|
||||
**
|
||||
** WARNING: In order to maintain backwards compatibility, add new
|
||||
** interfaces to the end of this structure only. If you insert new
|
||||
** interfaces in the middle of this structure, then older different
|
||||
** versions of SQLite will not be able to load each other's shared
|
||||
** libraries!
|
||||
*/
|
||||
struct sqlite3_api_routines {
|
||||
void * (*aggregate_context)(sqlite3_context*,int nBytes);
|
||||
int (*aggregate_count)(sqlite3_context*);
|
||||
int (*bind_blob)(sqlite3_stmt*,int,const void*,int n,void(*)(void*));
|
||||
int (*bind_double)(sqlite3_stmt*,int,double);
|
||||
int (*bind_int)(sqlite3_stmt*,int,int);
|
||||
int (*bind_int64)(sqlite3_stmt*,int,sqlite_int64);
|
||||
int (*bind_null)(sqlite3_stmt*,int);
|
||||
int (*bind_parameter_count)(sqlite3_stmt*);
|
||||
int (*bind_parameter_index)(sqlite3_stmt*,const char*zName);
|
||||
const char * (*bind_parameter_name)(sqlite3_stmt*,int);
|
||||
int (*bind_text)(sqlite3_stmt*,int,const char*,int n,void(*)(void*));
|
||||
int (*bind_text16)(sqlite3_stmt*,int,const void*,int,void(*)(void*));
|
||||
int (*bind_value)(sqlite3_stmt*,int,const sqlite3_value*);
|
||||
int (*busy_handler)(sqlite3*,int(*)(void*,int),void*);
|
||||
int (*busy_timeout)(sqlite3*,int ms);
|
||||
int (*changes)(sqlite3*);
|
||||
int (*close)(sqlite3*);
|
||||
int (*collation_needed)(sqlite3*,void*,void(*)(void*,sqlite3*,
|
||||
int eTextRep,const char*));
|
||||
int (*collation_needed16)(sqlite3*,void*,void(*)(void*,sqlite3*,
|
||||
int eTextRep,const void*));
|
||||
const void * (*column_blob)(sqlite3_stmt*,int iCol);
|
||||
int (*column_bytes)(sqlite3_stmt*,int iCol);
|
||||
int (*column_bytes16)(sqlite3_stmt*,int iCol);
|
||||
int (*column_count)(sqlite3_stmt*pStmt);
|
||||
const char * (*column_database_name)(sqlite3_stmt*,int);
|
||||
const void * (*column_database_name16)(sqlite3_stmt*,int);
|
||||
const char * (*column_decltype)(sqlite3_stmt*,int i);
|
||||
const void * (*column_decltype16)(sqlite3_stmt*,int);
|
||||
double (*column_double)(sqlite3_stmt*,int iCol);
|
||||
int (*column_int)(sqlite3_stmt*,int iCol);
|
||||
sqlite_int64 (*column_int64)(sqlite3_stmt*,int iCol);
|
||||
const char * (*column_name)(sqlite3_stmt*,int);
|
||||
const void * (*column_name16)(sqlite3_stmt*,int);
|
||||
const char * (*column_origin_name)(sqlite3_stmt*,int);
|
||||
const void * (*column_origin_name16)(sqlite3_stmt*,int);
|
||||
const char * (*column_table_name)(sqlite3_stmt*,int);
|
||||
const void * (*column_table_name16)(sqlite3_stmt*,int);
|
||||
const unsigned char * (*column_text)(sqlite3_stmt*,int iCol);
|
||||
const void * (*column_text16)(sqlite3_stmt*,int iCol);
|
||||
int (*column_type)(sqlite3_stmt*,int iCol);
|
||||
sqlite3_value* (*column_value)(sqlite3_stmt*,int iCol);
|
||||
void * (*commit_hook)(sqlite3*,int(*)(void*),void*);
|
||||
int (*complete)(const char*sql);
|
||||
int (*complete16)(const void*sql);
|
||||
int (*create_collation)(sqlite3*,const char*,int,void*,
|
||||
int(*)(void*,int,const void*,int,const void*));
|
||||
int (*create_collation16)(sqlite3*,const void*,int,void*,
|
||||
int(*)(void*,int,const void*,int,const void*));
|
||||
int (*create_function)(sqlite3*,const char*,int,int,void*,
|
||||
void (*xFunc)(sqlite3_context*,int,sqlite3_value**),
|
||||
void (*xStep)(sqlite3_context*,int,sqlite3_value**),
|
||||
void (*xFinal)(sqlite3_context*));
|
||||
int (*create_function16)(sqlite3*,const void*,int,int,void*,
|
||||
void (*xFunc)(sqlite3_context*,int,sqlite3_value**),
|
||||
void (*xStep)(sqlite3_context*,int,sqlite3_value**),
|
||||
void (*xFinal)(sqlite3_context*));
|
||||
int (*create_module)(sqlite3*,const char*,const sqlite3_module*,void*);
|
||||
int (*data_count)(sqlite3_stmt*pStmt);
|
||||
sqlite3 * (*db_handle)(sqlite3_stmt*);
|
||||
int (*declare_vtab)(sqlite3*,const char*);
|
||||
int (*enable_shared_cache)(int);
|
||||
int (*errcode)(sqlite3*db);
|
||||
const char * (*errmsg)(sqlite3*);
|
||||
const void * (*errmsg16)(sqlite3*);
|
||||
int (*exec)(sqlite3*,const char*,sqlite3_callback,void*,char**);
|
||||
int (*expired)(sqlite3_stmt*);
|
||||
int (*finalize)(sqlite3_stmt*pStmt);
|
||||
void (*free)(void*);
|
||||
void (*free_table)(char**result);
|
||||
int (*get_autocommit)(sqlite3*);
|
||||
void * (*get_auxdata)(sqlite3_context*,int);
|
||||
int (*get_table)(sqlite3*,const char*,char***,int*,int*,char**);
|
||||
int (*global_recover)(void);
|
||||
void (*interruptx)(sqlite3*);
|
||||
sqlite_int64 (*last_insert_rowid)(sqlite3*);
|
||||
const char * (*libversion)(void);
|
||||
int (*libversion_number)(void);
|
||||
void *(*malloc)(int);
|
||||
char * (*mprintf)(const char*,...);
|
||||
int (*open)(const char*,sqlite3**);
|
||||
int (*open16)(const void*,sqlite3**);
|
||||
int (*prepare)(sqlite3*,const char*,int,sqlite3_stmt**,const char**);
|
||||
int (*prepare16)(sqlite3*,const void*,int,sqlite3_stmt**,const void**);
|
||||
void * (*profile)(sqlite3*,void(*)(void*,const char*,sqlite_uint64),void*);
|
||||
void (*progress_handler)(sqlite3*,int,int(*)(void*),void*);
|
||||
void *(*realloc)(void*,int);
|
||||
int (*reset)(sqlite3_stmt*pStmt);
|
||||
void (*result_blob)(sqlite3_context*,const void*,int,void(*)(void*));
|
||||
void (*result_double)(sqlite3_context*,double);
|
||||
void (*result_error)(sqlite3_context*,const char*,int);
|
||||
void (*result_error16)(sqlite3_context*,const void*,int);
|
||||
void (*result_int)(sqlite3_context*,int);
|
||||
void (*result_int64)(sqlite3_context*,sqlite_int64);
|
||||
void (*result_null)(sqlite3_context*);
|
||||
void (*result_text)(sqlite3_context*,const char*,int,void(*)(void*));
|
||||
void (*result_text16)(sqlite3_context*,const void*,int,void(*)(void*));
|
||||
void (*result_text16be)(sqlite3_context*,const void*,int,void(*)(void*));
|
||||
void (*result_text16le)(sqlite3_context*,const void*,int,void(*)(void*));
|
||||
void (*result_value)(sqlite3_context*,sqlite3_value*);
|
||||
void * (*rollback_hook)(sqlite3*,void(*)(void*),void*);
|
||||
int (*set_authorizer)(sqlite3*,int(*)(void*,int,const char*,const char*,
|
||||
const char*,const char*),void*);
|
||||
void (*set_auxdata)(sqlite3_context*,int,void*,void (*)(void*));
|
||||
char * (*xsnprintf)(int,char*,const char*,...);
|
||||
int (*step)(sqlite3_stmt*);
|
||||
int (*table_column_metadata)(sqlite3*,const char*,const char*,const char*,
|
||||
char const**,char const**,int*,int*,int*);
|
||||
void (*thread_cleanup)(void);
|
||||
int (*total_changes)(sqlite3*);
|
||||
void * (*trace)(sqlite3*,void(*xTrace)(void*,const char*),void*);
|
||||
int (*transfer_bindings)(sqlite3_stmt*,sqlite3_stmt*);
|
||||
void * (*update_hook)(sqlite3*,void(*)(void*,int ,char const*,char const*,
|
||||
sqlite_int64),void*);
|
||||
void * (*user_data)(sqlite3_context*);
|
||||
const void * (*value_blob)(sqlite3_value*);
|
||||
int (*value_bytes)(sqlite3_value*);
|
||||
int (*value_bytes16)(sqlite3_value*);
|
||||
double (*value_double)(sqlite3_value*);
|
||||
int (*value_int)(sqlite3_value*);
|
||||
sqlite_int64 (*value_int64)(sqlite3_value*);
|
||||
int (*value_numeric_type)(sqlite3_value*);
|
||||
const unsigned char * (*value_text)(sqlite3_value*);
|
||||
const void * (*value_text16)(sqlite3_value*);
|
||||
const void * (*value_text16be)(sqlite3_value*);
|
||||
const void * (*value_text16le)(sqlite3_value*);
|
||||
int (*value_type)(sqlite3_value*);
|
||||
char *(*vmprintf)(const char*,va_list);
|
||||
/* Added ??? */
|
||||
int (*overload_function)(sqlite3*, const char *zFuncName, int nArg);
|
||||
/* Added by 3.3.13 */
|
||||
int (*prepare_v2)(sqlite3*,const char*,int,sqlite3_stmt**,const char**);
|
||||
int (*prepare16_v2)(sqlite3*,const void*,int,sqlite3_stmt**,const void**);
|
||||
int (*clear_bindings)(sqlite3_stmt*);
|
||||
/* Added by 3.4.1 */
|
||||
int (*create_module_v2)(sqlite3*,const char*,const sqlite3_module*,void*,
|
||||
void (*xDestroy)(void *));
|
||||
/* Added by 3.5.0 */
|
||||
int (*bind_zeroblob)(sqlite3_stmt*,int,int);
|
||||
int (*blob_bytes)(sqlite3_blob*);
|
||||
int (*blob_close)(sqlite3_blob*);
|
||||
int (*blob_open)(sqlite3*,const char*,const char*,const char*,sqlite3_int64,
|
||||
int,sqlite3_blob**);
|
||||
int (*blob_read)(sqlite3_blob*,void*,int,int);
|
||||
int (*blob_write)(sqlite3_blob*,const void*,int,int);
|
||||
int (*create_collation_v2)(sqlite3*,const char*,int,void*,
|
||||
int(*)(void*,int,const void*,int,const void*),
|
||||
void(*)(void*));
|
||||
int (*file_control)(sqlite3*,const char*,int,void*);
|
||||
sqlite3_int64 (*memory_highwater)(int);
|
||||
sqlite3_int64 (*memory_used)(void);
|
||||
sqlite3_mutex *(*mutex_alloc)(int);
|
||||
void (*mutex_enter)(sqlite3_mutex*);
|
||||
void (*mutex_free)(sqlite3_mutex*);
|
||||
void (*mutex_leave)(sqlite3_mutex*);
|
||||
int (*mutex_try)(sqlite3_mutex*);
|
||||
int (*open_v2)(const char*,sqlite3**,int,const char*);
|
||||
int (*release_memory)(int);
|
||||
void (*result_error_nomem)(sqlite3_context*);
|
||||
void (*result_error_toobig)(sqlite3_context*);
|
||||
int (*sleep)(int);
|
||||
void (*soft_heap_limit)(int);
|
||||
sqlite3_vfs *(*vfs_find)(const char*);
|
||||
int (*vfs_register)(sqlite3_vfs*,int);
|
||||
int (*vfs_unregister)(sqlite3_vfs*);
|
||||
int (*xthreadsafe)(void);
|
||||
void (*result_zeroblob)(sqlite3_context*,int);
|
||||
void (*result_error_code)(sqlite3_context*,int);
|
||||
int (*test_control)(int, ...);
|
||||
void (*randomness)(int,void*);
|
||||
sqlite3 *(*context_db_handle)(sqlite3_context*);
|
||||
int (*extended_result_codes)(sqlite3*,int);
|
||||
int (*limit)(sqlite3*,int,int);
|
||||
sqlite3_stmt *(*next_stmt)(sqlite3*,sqlite3_stmt*);
|
||||
const char *(*sql)(sqlite3_stmt*);
|
||||
int (*status)(int,int*,int*,int);
|
||||
int (*backup_finish)(sqlite3_backup*);
|
||||
sqlite3_backup *(*backup_init)(sqlite3*,const char*,sqlite3*,const char*);
|
||||
int (*backup_pagecount)(sqlite3_backup*);
|
||||
int (*backup_remaining)(sqlite3_backup*);
|
||||
int (*backup_step)(sqlite3_backup*,int);
|
||||
const char *(*compileoption_get)(int);
|
||||
int (*compileoption_used)(const char*);
|
||||
int (*create_function_v2)(sqlite3*,const char*,int,int,void*,
|
||||
void (*xFunc)(sqlite3_context*,int,sqlite3_value**),
|
||||
void (*xStep)(sqlite3_context*,int,sqlite3_value**),
|
||||
void (*xFinal)(sqlite3_context*),
|
||||
void(*xDestroy)(void*));
|
||||
int (*db_config)(sqlite3*,int,...);
|
||||
sqlite3_mutex *(*db_mutex)(sqlite3*);
|
||||
int (*db_status)(sqlite3*,int,int*,int*,int);
|
||||
int (*extended_errcode)(sqlite3*);
|
||||
void (*log)(int,const char*,...);
|
||||
sqlite3_int64 (*soft_heap_limit64)(sqlite3_int64);
|
||||
const char *(*sourceid)(void);
|
||||
int (*stmt_status)(sqlite3_stmt*,int,int);
|
||||
int (*strnicmp)(const char*,const char*,int);
|
||||
int (*unlock_notify)(sqlite3*,void(*)(void**,int),void*);
|
||||
int (*wal_autocheckpoint)(sqlite3*,int);
|
||||
int (*wal_checkpoint)(sqlite3*,const char*);
|
||||
void *(*wal_hook)(sqlite3*,int(*)(void*,sqlite3*,const char*,int),void*);
|
||||
int (*blob_reopen)(sqlite3_blob*,sqlite3_int64);
|
||||
int (*vtab_config)(sqlite3*,int op,...);
|
||||
int (*vtab_on_conflict)(sqlite3*);
|
||||
/* Version 3.7.16 and later */
|
||||
int (*close_v2)(sqlite3*);
|
||||
const char *(*db_filename)(sqlite3*,const char*);
|
||||
int (*db_readonly)(sqlite3*,const char*);
|
||||
int (*db_release_memory)(sqlite3*);
|
||||
const char *(*errstr)(int);
|
||||
int (*stmt_busy)(sqlite3_stmt*);
|
||||
int (*stmt_readonly)(sqlite3_stmt*);
|
||||
int (*stricmp)(const char*,const char*);
|
||||
int (*uri_boolean)(const char*,const char*,int);
|
||||
sqlite3_int64 (*uri_int64)(const char*,const char*,sqlite3_int64);
|
||||
const char *(*uri_parameter)(const char*,const char*);
|
||||
char *(*xvsnprintf)(int,char*,const char*,va_list);
|
||||
int (*wal_checkpoint_v2)(sqlite3*,const char*,int,int*,int*);
|
||||
/* Version 3.8.7 and later */
|
||||
int (*auto_extension)(void(*)(void));
|
||||
int (*bind_blob64)(sqlite3_stmt*,int,const void*,sqlite3_uint64,
|
||||
void(*)(void*));
|
||||
int (*bind_text64)(sqlite3_stmt*,int,const char*,sqlite3_uint64,
|
||||
void(*)(void*),unsigned char);
|
||||
int (*cancel_auto_extension)(void(*)(void));
|
||||
int (*load_extension)(sqlite3*,const char*,const char*,char**);
|
||||
void *(*malloc64)(sqlite3_uint64);
|
||||
sqlite3_uint64 (*msize)(void*);
|
||||
void *(*realloc64)(void*,sqlite3_uint64);
|
||||
void (*reset_auto_extension)(void);
|
||||
void (*result_blob64)(sqlite3_context*,const void*,sqlite3_uint64,
|
||||
void(*)(void*));
|
||||
void (*result_text64)(sqlite3_context*,const char*,sqlite3_uint64,
|
||||
void(*)(void*), unsigned char);
|
||||
int (*strglob)(const char*,const char*);
|
||||
/* Version 3.8.11 and later */
|
||||
sqlite3_value *(*value_dup)(const sqlite3_value*);
|
||||
void (*value_free)(sqlite3_value*);
|
||||
int (*result_zeroblob64)(sqlite3_context*,sqlite3_uint64);
|
||||
int (*bind_zeroblob64)(sqlite3_stmt*, int, sqlite3_uint64);
|
||||
/* Version 3.9.0 and later */
|
||||
unsigned int (*value_subtype)(sqlite3_value*);
|
||||
void (*result_subtype)(sqlite3_context*,unsigned int);
|
||||
/* Version 3.10.0 and later */
|
||||
int (*status64)(int,sqlite3_int64*,sqlite3_int64*,int);
|
||||
int (*strlike)(const char*,const char*,unsigned int);
|
||||
int (*db_cacheflush)(sqlite3*);
|
||||
/* Version 3.12.0 and later */
|
||||
int (*system_errno)(sqlite3*);
|
||||
/* Version 3.14.0 and later */
|
||||
int (*trace_v2)(sqlite3*,unsigned,int(*)(unsigned,void*,void*,void*),void*);
|
||||
char *(*expanded_sql)(sqlite3_stmt*);
|
||||
/* Version 3.18.0 and later */
|
||||
void (*set_last_insert_rowid)(sqlite3*,sqlite3_int64);
|
||||
/* Version 3.20.0 and later */
|
||||
int (*prepare_v3)(sqlite3*,const char*,int,unsigned int,
|
||||
sqlite3_stmt**,const char**);
|
||||
int (*prepare16_v3)(sqlite3*,const void*,int,unsigned int,
|
||||
sqlite3_stmt**,const void**);
|
||||
int (*bind_pointer)(sqlite3_stmt*,int,void*,const char*,void(*)(void*));
|
||||
void (*result_pointer)(sqlite3_context*,void*,const char*,void(*)(void*));
|
||||
void *(*value_pointer)(sqlite3_value*,const char*);
|
||||
int (*vtab_nochange)(sqlite3_context*);
|
||||
int (*value_nochange)(sqlite3_value*);
|
||||
const char *(*vtab_collation)(sqlite3_index_info*,int);
|
||||
/* Version 3.24.0 and later */
|
||||
int (*keyword_count)(void);
|
||||
int (*keyword_name)(int,const char**,int*);
|
||||
int (*keyword_check)(const char*,int);
|
||||
sqlite3_str *(*str_new)(sqlite3*);
|
||||
char *(*str_finish)(sqlite3_str*);
|
||||
void (*str_appendf)(sqlite3_str*, const char *zFormat, ...);
|
||||
void (*str_vappendf)(sqlite3_str*, const char *zFormat, va_list);
|
||||
void (*str_append)(sqlite3_str*, const char *zIn, int N);
|
||||
void (*str_appendall)(sqlite3_str*, const char *zIn);
|
||||
void (*str_appendchar)(sqlite3_str*, int N, char C);
|
||||
void (*str_reset)(sqlite3_str*);
|
||||
int (*str_errcode)(sqlite3_str*);
|
||||
int (*str_length)(sqlite3_str*);
|
||||
char *(*str_value)(sqlite3_str*);
|
||||
/* Version 3.25.0 and later */
|
||||
int (*create_window_function)(sqlite3*,const char*,int,int,void*,
|
||||
void (*xStep)(sqlite3_context*,int,sqlite3_value**),
|
||||
void (*xFinal)(sqlite3_context*),
|
||||
void (*xValue)(sqlite3_context*),
|
||||
void (*xInv)(sqlite3_context*,int,sqlite3_value**),
|
||||
void(*xDestroy)(void*));
|
||||
/* Version 3.26.0 and later */
|
||||
const char *(*normalized_sql)(sqlite3_stmt*);
|
||||
/* Version 3.28.0 and later */
|
||||
int (*stmt_isexplain)(sqlite3_stmt*);
|
||||
int (*value_frombind)(sqlite3_value*);
|
||||
/* Version 3.30.0 and later */
|
||||
int (*drop_modules)(sqlite3*,const char**);
|
||||
/* Version 3.31.0 and later */
|
||||
sqlite3_int64 (*hard_heap_limit64)(sqlite3_int64);
|
||||
const char *(*uri_key)(const char*,int);
|
||||
const char *(*filename_database)(const char*);
|
||||
const char *(*filename_journal)(const char*);
|
||||
const char *(*filename_wal)(const char*);
|
||||
/* Version 3.32.0 and later */
|
||||
char *(*create_filename)(const char*,const char*,const char*,
|
||||
int,const char**);
|
||||
void (*free_filename)(char*);
|
||||
sqlite3_file *(*database_file_object)(const char*);
|
||||
/* Version 3.34.0 and later */
|
||||
int (*txn_state)(sqlite3*,const char*);
|
||||
/* Version 3.36.1 and later */
|
||||
sqlite3_int64 (*changes64)(sqlite3*);
|
||||
sqlite3_int64 (*total_changes64)(sqlite3*);
|
||||
/* Version 3.37.0 and later */
|
||||
int (*autovacuum_pages)(sqlite3*,
|
||||
unsigned int(*)(void*,const char*,unsigned int,unsigned int,unsigned int),
|
||||
void*, void(*)(void*));
|
||||
/* Version 3.38.0 and later */
|
||||
int (*error_offset)(sqlite3*);
|
||||
int (*vtab_rhs_value)(sqlite3_index_info*,int,sqlite3_value**);
|
||||
int (*vtab_distinct)(sqlite3_index_info*);
|
||||
int (*vtab_in)(sqlite3_index_info*,int,int);
|
||||
int (*vtab_in_first)(sqlite3_value*,sqlite3_value**);
|
||||
int (*vtab_in_next)(sqlite3_value*,sqlite3_value**);
|
||||
};
|
||||
|
||||
/*
|
||||
** This is the function signature used for all extension entry points. It
|
||||
** is also defined in the file "loadext.c".
|
||||
*/
|
||||
typedef int (*sqlite3_loadext_entry)(
|
||||
sqlite3 *db, /* Handle to the database. */
|
||||
char **pzErrMsg, /* Used to set error string on failure. */
|
||||
const sqlite3_api_routines *pThunk /* Extension API function pointers. */
|
||||
);
|
||||
|
||||
/*
|
||||
** The following macros redefine the API routines so that they are
|
||||
** redirected through the global sqlite3_api structure.
|
||||
**
|
||||
** This header file is also used by the loadext.c source file
|
||||
** (part of the main SQLite library - not an extension) so that
|
||||
** it can get access to the sqlite3_api_routines structure
|
||||
** definition. But the main library does not want to redefine
|
||||
** the API. So the redefinition macros are only valid if the
|
||||
** SQLITE_CORE macros is undefined.
|
||||
*/
|
||||
#if !defined(SQLITE_CORE) && !defined(SQLITE_OMIT_LOAD_EXTENSION)
|
||||
#define sqlite3_aggregate_context sqlite3_api->aggregate_context
|
||||
#ifndef SQLITE_OMIT_DEPRECATED
|
||||
#define sqlite3_aggregate_count sqlite3_api->aggregate_count
|
||||
#endif
|
||||
#define sqlite3_bind_blob sqlite3_api->bind_blob
|
||||
#define sqlite3_bind_double sqlite3_api->bind_double
|
||||
#define sqlite3_bind_int sqlite3_api->bind_int
|
||||
#define sqlite3_bind_int64 sqlite3_api->bind_int64
|
||||
#define sqlite3_bind_null sqlite3_api->bind_null
|
||||
#define sqlite3_bind_parameter_count sqlite3_api->bind_parameter_count
|
||||
#define sqlite3_bind_parameter_index sqlite3_api->bind_parameter_index
|
||||
#define sqlite3_bind_parameter_name sqlite3_api->bind_parameter_name
|
||||
#define sqlite3_bind_text sqlite3_api->bind_text
|
||||
#define sqlite3_bind_text16 sqlite3_api->bind_text16
|
||||
#define sqlite3_bind_value sqlite3_api->bind_value
|
||||
#define sqlite3_busy_handler sqlite3_api->busy_handler
|
||||
#define sqlite3_busy_timeout sqlite3_api->busy_timeout
|
||||
#define sqlite3_changes sqlite3_api->changes
|
||||
#define sqlite3_close sqlite3_api->close
|
||||
#define sqlite3_collation_needed sqlite3_api->collation_needed
|
||||
#define sqlite3_collation_needed16 sqlite3_api->collation_needed16
|
||||
#define sqlite3_column_blob sqlite3_api->column_blob
|
||||
#define sqlite3_column_bytes sqlite3_api->column_bytes
|
||||
#define sqlite3_column_bytes16 sqlite3_api->column_bytes16
|
||||
#define sqlite3_column_count sqlite3_api->column_count
|
||||
#define sqlite3_column_database_name sqlite3_api->column_database_name
|
||||
#define sqlite3_column_database_name16 sqlite3_api->column_database_name16
|
||||
#define sqlite3_column_decltype sqlite3_api->column_decltype
|
||||
#define sqlite3_column_decltype16 sqlite3_api->column_decltype16
|
||||
#define sqlite3_column_double sqlite3_api->column_double
|
||||
#define sqlite3_column_int sqlite3_api->column_int
|
||||
#define sqlite3_column_int64 sqlite3_api->column_int64
|
||||
#define sqlite3_column_name sqlite3_api->column_name
|
||||
#define sqlite3_column_name16 sqlite3_api->column_name16
|
||||
#define sqlite3_column_origin_name sqlite3_api->column_origin_name
|
||||
#define sqlite3_column_origin_name16 sqlite3_api->column_origin_name16
|
||||
#define sqlite3_column_table_name sqlite3_api->column_table_name
|
||||
#define sqlite3_column_table_name16 sqlite3_api->column_table_name16
|
||||
#define sqlite3_column_text sqlite3_api->column_text
|
||||
#define sqlite3_column_text16 sqlite3_api->column_text16
|
||||
#define sqlite3_column_type sqlite3_api->column_type
|
||||
#define sqlite3_column_value sqlite3_api->column_value
|
||||
#define sqlite3_commit_hook sqlite3_api->commit_hook
|
||||
#define sqlite3_complete sqlite3_api->complete
|
||||
#define sqlite3_complete16 sqlite3_api->complete16
|
||||
#define sqlite3_create_collation sqlite3_api->create_collation
|
||||
#define sqlite3_create_collation16 sqlite3_api->create_collation16
|
||||
#define sqlite3_create_function sqlite3_api->create_function
|
||||
#define sqlite3_create_function16 sqlite3_api->create_function16
|
||||
#define sqlite3_create_module sqlite3_api->create_module
|
||||
#define sqlite3_create_module_v2 sqlite3_api->create_module_v2
|
||||
#define sqlite3_data_count sqlite3_api->data_count
|
||||
#define sqlite3_db_handle sqlite3_api->db_handle
|
||||
#define sqlite3_declare_vtab sqlite3_api->declare_vtab
|
||||
#define sqlite3_enable_shared_cache sqlite3_api->enable_shared_cache
|
||||
#define sqlite3_errcode sqlite3_api->errcode
|
||||
#define sqlite3_errmsg sqlite3_api->errmsg
|
||||
#define sqlite3_errmsg16 sqlite3_api->errmsg16
|
||||
#define sqlite3_exec sqlite3_api->exec
|
||||
#ifndef SQLITE_OMIT_DEPRECATED
|
||||
#define sqlite3_expired sqlite3_api->expired
|
||||
#endif
|
||||
#define sqlite3_finalize sqlite3_api->finalize
|
||||
#define sqlite3_free sqlite3_api->free
|
||||
#define sqlite3_free_table sqlite3_api->free_table
|
||||
#define sqlite3_get_autocommit sqlite3_api->get_autocommit
|
||||
#define sqlite3_get_auxdata sqlite3_api->get_auxdata
|
||||
#define sqlite3_get_table sqlite3_api->get_table
|
||||
#ifndef SQLITE_OMIT_DEPRECATED
|
||||
#define sqlite3_global_recover sqlite3_api->global_recover
|
||||
#endif
|
||||
#define sqlite3_interrupt sqlite3_api->interruptx
|
||||
#define sqlite3_last_insert_rowid sqlite3_api->last_insert_rowid
|
||||
#define sqlite3_libversion sqlite3_api->libversion
|
||||
#define sqlite3_libversion_number sqlite3_api->libversion_number
|
||||
#define sqlite3_malloc sqlite3_api->malloc
|
||||
#define sqlite3_mprintf sqlite3_api->mprintf
|
||||
#define sqlite3_open sqlite3_api->open
|
||||
#define sqlite3_open16 sqlite3_api->open16
|
||||
#define sqlite3_prepare sqlite3_api->prepare
|
||||
#define sqlite3_prepare16 sqlite3_api->prepare16
|
||||
#define sqlite3_prepare_v2 sqlite3_api->prepare_v2
|
||||
#define sqlite3_prepare16_v2 sqlite3_api->prepare16_v2
|
||||
#define sqlite3_profile sqlite3_api->profile
|
||||
#define sqlite3_progress_handler sqlite3_api->progress_handler
|
||||
#define sqlite3_realloc sqlite3_api->realloc
|
||||
#define sqlite3_reset sqlite3_api->reset
|
||||
#define sqlite3_result_blob sqlite3_api->result_blob
|
||||
#define sqlite3_result_double sqlite3_api->result_double
|
||||
#define sqlite3_result_error sqlite3_api->result_error
|
||||
#define sqlite3_result_error16 sqlite3_api->result_error16
|
||||
#define sqlite3_result_int sqlite3_api->result_int
|
||||
#define sqlite3_result_int64 sqlite3_api->result_int64
|
||||
#define sqlite3_result_null sqlite3_api->result_null
|
||||
#define sqlite3_result_text sqlite3_api->result_text
|
||||
#define sqlite3_result_text16 sqlite3_api->result_text16
|
||||
#define sqlite3_result_text16be sqlite3_api->result_text16be
|
||||
#define sqlite3_result_text16le sqlite3_api->result_text16le
|
||||
#define sqlite3_result_value sqlite3_api->result_value
|
||||
#define sqlite3_rollback_hook sqlite3_api->rollback_hook
|
||||
#define sqlite3_set_authorizer sqlite3_api->set_authorizer
|
||||
#define sqlite3_set_auxdata sqlite3_api->set_auxdata
|
||||
#define sqlite3_snprintf sqlite3_api->xsnprintf
|
||||
#define sqlite3_step sqlite3_api->step
|
||||
#define sqlite3_table_column_metadata sqlite3_api->table_column_metadata
|
||||
#define sqlite3_thread_cleanup sqlite3_api->thread_cleanup
|
||||
#define sqlite3_total_changes sqlite3_api->total_changes
|
||||
#define sqlite3_trace sqlite3_api->trace
|
||||
#ifndef SQLITE_OMIT_DEPRECATED
|
||||
#define sqlite3_transfer_bindings sqlite3_api->transfer_bindings
|
||||
#endif
|
||||
#define sqlite3_update_hook sqlite3_api->update_hook
|
||||
#define sqlite3_user_data sqlite3_api->user_data
|
||||
#define sqlite3_value_blob sqlite3_api->value_blob
|
||||
#define sqlite3_value_bytes sqlite3_api->value_bytes
|
||||
#define sqlite3_value_bytes16 sqlite3_api->value_bytes16
|
||||
#define sqlite3_value_double sqlite3_api->value_double
|
||||
#define sqlite3_value_int sqlite3_api->value_int
|
||||
#define sqlite3_value_int64 sqlite3_api->value_int64
|
||||
#define sqlite3_value_numeric_type sqlite3_api->value_numeric_type
|
||||
#define sqlite3_value_text sqlite3_api->value_text
|
||||
#define sqlite3_value_text16 sqlite3_api->value_text16
|
||||
#define sqlite3_value_text16be sqlite3_api->value_text16be
|
||||
#define sqlite3_value_text16le sqlite3_api->value_text16le
|
||||
#define sqlite3_value_type sqlite3_api->value_type
|
||||
#define sqlite3_vmprintf sqlite3_api->vmprintf
|
||||
#define sqlite3_vsnprintf sqlite3_api->xvsnprintf
|
||||
#define sqlite3_overload_function sqlite3_api->overload_function
|
||||
#define sqlite3_prepare_v2 sqlite3_api->prepare_v2
|
||||
#define sqlite3_prepare16_v2 sqlite3_api->prepare16_v2
|
||||
#define sqlite3_clear_bindings sqlite3_api->clear_bindings
|
||||
#define sqlite3_bind_zeroblob sqlite3_api->bind_zeroblob
|
||||
#define sqlite3_blob_bytes sqlite3_api->blob_bytes
|
||||
#define sqlite3_blob_close sqlite3_api->blob_close
|
||||
#define sqlite3_blob_open sqlite3_api->blob_open
|
||||
#define sqlite3_blob_read sqlite3_api->blob_read
|
||||
#define sqlite3_blob_write sqlite3_api->blob_write
|
||||
#define sqlite3_create_collation_v2 sqlite3_api->create_collation_v2
|
||||
#define sqlite3_file_control sqlite3_api->file_control
|
||||
#define sqlite3_memory_highwater sqlite3_api->memory_highwater
|
||||
#define sqlite3_memory_used sqlite3_api->memory_used
|
||||
#define sqlite3_mutex_alloc sqlite3_api->mutex_alloc
|
||||
#define sqlite3_mutex_enter sqlite3_api->mutex_enter
|
||||
#define sqlite3_mutex_free sqlite3_api->mutex_free
|
||||
#define sqlite3_mutex_leave sqlite3_api->mutex_leave
|
||||
#define sqlite3_mutex_try sqlite3_api->mutex_try
|
||||
#define sqlite3_open_v2 sqlite3_api->open_v2
|
||||
#define sqlite3_release_memory sqlite3_api->release_memory
|
||||
#define sqlite3_result_error_nomem sqlite3_api->result_error_nomem
|
||||
#define sqlite3_result_error_toobig sqlite3_api->result_error_toobig
|
||||
#define sqlite3_sleep sqlite3_api->sleep
|
||||
#define sqlite3_soft_heap_limit sqlite3_api->soft_heap_limit
|
||||
#define sqlite3_vfs_find sqlite3_api->vfs_find
|
||||
#define sqlite3_vfs_register sqlite3_api->vfs_register
|
||||
#define sqlite3_vfs_unregister sqlite3_api->vfs_unregister
|
||||
#define sqlite3_threadsafe sqlite3_api->xthreadsafe
|
||||
#define sqlite3_result_zeroblob sqlite3_api->result_zeroblob
|
||||
#define sqlite3_result_error_code sqlite3_api->result_error_code
|
||||
#define sqlite3_test_control sqlite3_api->test_control
|
||||
#define sqlite3_randomness sqlite3_api->randomness
|
||||
#define sqlite3_context_db_handle sqlite3_api->context_db_handle
|
||||
#define sqlite3_extended_result_codes sqlite3_api->extended_result_codes
|
||||
#define sqlite3_limit sqlite3_api->limit
|
||||
#define sqlite3_next_stmt sqlite3_api->next_stmt
|
||||
#define sqlite3_sql sqlite3_api->sql
|
||||
#define sqlite3_status sqlite3_api->status
|
||||
#define sqlite3_backup_finish sqlite3_api->backup_finish
|
||||
#define sqlite3_backup_init sqlite3_api->backup_init
|
||||
#define sqlite3_backup_pagecount sqlite3_api->backup_pagecount
|
||||
#define sqlite3_backup_remaining sqlite3_api->backup_remaining
|
||||
#define sqlite3_backup_step sqlite3_api->backup_step
|
||||
#define sqlite3_compileoption_get sqlite3_api->compileoption_get
|
||||
#define sqlite3_compileoption_used sqlite3_api->compileoption_used
|
||||
#define sqlite3_create_function_v2 sqlite3_api->create_function_v2
|
||||
#define sqlite3_db_config sqlite3_api->db_config
|
||||
#define sqlite3_db_mutex sqlite3_api->db_mutex
|
||||
#define sqlite3_db_status sqlite3_api->db_status
|
||||
#define sqlite3_extended_errcode sqlite3_api->extended_errcode
|
||||
#define sqlite3_log sqlite3_api->log
|
||||
#define sqlite3_soft_heap_limit64 sqlite3_api->soft_heap_limit64
|
||||
#define sqlite3_sourceid sqlite3_api->sourceid
|
||||
#define sqlite3_stmt_status sqlite3_api->stmt_status
|
||||
#define sqlite3_strnicmp sqlite3_api->strnicmp
|
||||
#define sqlite3_unlock_notify sqlite3_api->unlock_notify
|
||||
#define sqlite3_wal_autocheckpoint sqlite3_api->wal_autocheckpoint
|
||||
#define sqlite3_wal_checkpoint sqlite3_api->wal_checkpoint
|
||||
#define sqlite3_wal_hook sqlite3_api->wal_hook
|
||||
#define sqlite3_blob_reopen sqlite3_api->blob_reopen
|
||||
#define sqlite3_vtab_config sqlite3_api->vtab_config
|
||||
#define sqlite3_vtab_on_conflict sqlite3_api->vtab_on_conflict
|
||||
/* Version 3.7.16 and later */
|
||||
#define sqlite3_close_v2 sqlite3_api->close_v2
|
||||
#define sqlite3_db_filename sqlite3_api->db_filename
|
||||
#define sqlite3_db_readonly sqlite3_api->db_readonly
|
||||
#define sqlite3_db_release_memory sqlite3_api->db_release_memory
|
||||
#define sqlite3_errstr sqlite3_api->errstr
|
||||
#define sqlite3_stmt_busy sqlite3_api->stmt_busy
|
||||
#define sqlite3_stmt_readonly sqlite3_api->stmt_readonly
|
||||
#define sqlite3_stricmp sqlite3_api->stricmp
|
||||
#define sqlite3_uri_boolean sqlite3_api->uri_boolean
|
||||
#define sqlite3_uri_int64 sqlite3_api->uri_int64
|
||||
#define sqlite3_uri_parameter sqlite3_api->uri_parameter
|
||||
#define sqlite3_uri_vsnprintf sqlite3_api->xvsnprintf
|
||||
#define sqlite3_wal_checkpoint_v2 sqlite3_api->wal_checkpoint_v2
|
||||
/* Version 3.8.7 and later */
|
||||
#define sqlite3_auto_extension sqlite3_api->auto_extension
|
||||
#define sqlite3_bind_blob64 sqlite3_api->bind_blob64
|
||||
#define sqlite3_bind_text64 sqlite3_api->bind_text64
|
||||
#define sqlite3_cancel_auto_extension sqlite3_api->cancel_auto_extension
|
||||
#define sqlite3_load_extension sqlite3_api->load_extension
|
||||
#define sqlite3_malloc64 sqlite3_api->malloc64
|
||||
#define sqlite3_msize sqlite3_api->msize
|
||||
#define sqlite3_realloc64 sqlite3_api->realloc64
|
||||
#define sqlite3_reset_auto_extension sqlite3_api->reset_auto_extension
|
||||
#define sqlite3_result_blob64 sqlite3_api->result_blob64
|
||||
#define sqlite3_result_text64 sqlite3_api->result_text64
|
||||
#define sqlite3_strglob sqlite3_api->strglob
|
||||
/* Version 3.8.11 and later */
|
||||
#define sqlite3_value_dup sqlite3_api->value_dup
|
||||
#define sqlite3_value_free sqlite3_api->value_free
|
||||
#define sqlite3_result_zeroblob64 sqlite3_api->result_zeroblob64
|
||||
#define sqlite3_bind_zeroblob64 sqlite3_api->bind_zeroblob64
|
||||
/* Version 3.9.0 and later */
|
||||
#define sqlite3_value_subtype sqlite3_api->value_subtype
|
||||
#define sqlite3_result_subtype sqlite3_api->result_subtype
|
||||
/* Version 3.10.0 and later */
|
||||
#define sqlite3_status64 sqlite3_api->status64
|
||||
#define sqlite3_strlike sqlite3_api->strlike
|
||||
#define sqlite3_db_cacheflush sqlite3_api->db_cacheflush
|
||||
/* Version 3.12.0 and later */
|
||||
#define sqlite3_system_errno sqlite3_api->system_errno
|
||||
/* Version 3.14.0 and later */
|
||||
#define sqlite3_trace_v2 sqlite3_api->trace_v2
|
||||
#define sqlite3_expanded_sql sqlite3_api->expanded_sql
|
||||
/* Version 3.18.0 and later */
|
||||
#define sqlite3_set_last_insert_rowid sqlite3_api->set_last_insert_rowid
|
||||
/* Version 3.20.0 and later */
|
||||
#define sqlite3_prepare_v3 sqlite3_api->prepare_v3
|
||||
#define sqlite3_prepare16_v3 sqlite3_api->prepare16_v3
|
||||
#define sqlite3_bind_pointer sqlite3_api->bind_pointer
|
||||
#define sqlite3_result_pointer sqlite3_api->result_pointer
|
||||
#define sqlite3_value_pointer sqlite3_api->value_pointer
|
||||
/* Version 3.22.0 and later */
|
||||
#define sqlite3_vtab_nochange sqlite3_api->vtab_nochange
|
||||
#define sqlite3_value_nochange sqlite3_api->value_nochange
|
||||
#define sqlite3_vtab_collation sqlite3_api->vtab_collation
|
||||
/* Version 3.24.0 and later */
|
||||
#define sqlite3_keyword_count sqlite3_api->keyword_count
|
||||
#define sqlite3_keyword_name sqlite3_api->keyword_name
|
||||
#define sqlite3_keyword_check sqlite3_api->keyword_check
|
||||
#define sqlite3_str_new sqlite3_api->str_new
|
||||
#define sqlite3_str_finish sqlite3_api->str_finish
|
||||
#define sqlite3_str_appendf sqlite3_api->str_appendf
|
||||
#define sqlite3_str_vappendf sqlite3_api->str_vappendf
|
||||
#define sqlite3_str_append sqlite3_api->str_append
|
||||
#define sqlite3_str_appendall sqlite3_api->str_appendall
|
||||
#define sqlite3_str_appendchar sqlite3_api->str_appendchar
|
||||
#define sqlite3_str_reset sqlite3_api->str_reset
|
||||
#define sqlite3_str_errcode sqlite3_api->str_errcode
|
||||
#define sqlite3_str_length sqlite3_api->str_length
|
||||
#define sqlite3_str_value sqlite3_api->str_value
|
||||
/* Version 3.25.0 and later */
|
||||
#define sqlite3_create_window_function sqlite3_api->create_window_function
|
||||
/* Version 3.26.0 and later */
|
||||
#define sqlite3_normalized_sql sqlite3_api->normalized_sql
|
||||
/* Version 3.28.0 and later */
|
||||
#define sqlite3_stmt_isexplain sqlite3_api->stmt_isexplain
|
||||
#define sqlite3_value_frombind sqlite3_api->value_frombind
|
||||
/* Version 3.30.0 and later */
|
||||
#define sqlite3_drop_modules sqlite3_api->drop_modules
|
||||
/* Version 3.31.0 and later */
|
||||
#define sqlite3_hard_heap_limit64 sqlite3_api->hard_heap_limit64
|
||||
#define sqlite3_uri_key sqlite3_api->uri_key
|
||||
#define sqlite3_filename_database sqlite3_api->filename_database
|
||||
#define sqlite3_filename_journal sqlite3_api->filename_journal
|
||||
#define sqlite3_filename_wal sqlite3_api->filename_wal
|
||||
/* Version 3.32.0 and later */
|
||||
#define sqlite3_create_filename sqlite3_api->create_filename
|
||||
#define sqlite3_free_filename sqlite3_api->free_filename
|
||||
#define sqlite3_database_file_object sqlite3_api->database_file_object
|
||||
/* Version 3.34.0 and later */
|
||||
#define sqlite3_txn_state sqlite3_api->txn_state
|
||||
/* Version 3.36.1 and later */
|
||||
#define sqlite3_changes64 sqlite3_api->changes64
|
||||
#define sqlite3_total_changes64 sqlite3_api->total_changes64
|
||||
/* Version 3.37.0 and later */
|
||||
#define sqlite3_autovacuum_pages sqlite3_api->autovacuum_pages
|
||||
/* Version 3.38.0 and later */
|
||||
#define sqlite3_error_offset sqlite3_api->error_offset
|
||||
#define sqlite3_vtab_rhs_value sqlite3_api->vtab_rhs_value
|
||||
#define sqlite3_vtab_distinct sqlite3_api->vtab_distinct
|
||||
#define sqlite3_vtab_in sqlite3_api->vtab_in
|
||||
#define sqlite3_vtab_in_first sqlite3_api->vtab_in_first
|
||||
#define sqlite3_vtab_in_next sqlite3_api->vtab_in_next
|
||||
#endif /* !defined(SQLITE_CORE) && !defined(SQLITE_OMIT_LOAD_EXTENSION) */
|
||||
|
||||
#if !defined(SQLITE_CORE) && !defined(SQLITE_OMIT_LOAD_EXTENSION)
|
||||
/* This case when the file really is being compiled as a loadable
|
||||
** extension */
|
||||
# define SQLITE_EXTENSION_INIT1 const sqlite3_api_routines *sqlite3_api=0;
|
||||
# define SQLITE_EXTENSION_INIT2(v) sqlite3_api=v;
|
||||
# define SQLITE_EXTENSION_INIT3 \
|
||||
extern const sqlite3_api_routines *sqlite3_api;
|
||||
#else
|
||||
/* This case when the file is being statically linked into the
|
||||
** application */
|
||||
# define SQLITE_EXTENSION_INIT1 /*no-op*/
|
||||
# define SQLITE_EXTENSION_INIT2(v) (void)v; /* unused parameter */
|
||||
# define SQLITE_EXTENSION_INIT3 /*no-op*/
|
||||
#endif
|
||||
|
||||
#endif /* SQLITE3EXT_H */
|
||||
#else // USE_LIBSQLITE3
|
||||
// If users really want to link against the system sqlite3 we
|
||||
// need to make this file a noop.
|
||||
#endif
|
|
@ -0,0 +1,37 @@
|
|||
// Copyright (C) 2019 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build !cgo
|
||||
|
||||
package sqlite3
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"database/sql/driver"
|
||||
"errors"
|
||||
)
|
||||
|
||||
var errorMsg = errors.New("Binary was compiled with 'CGO_ENABLED=0', go-sqlite3 requires cgo to work. This is a stub")
|
||||
|
||||
func init() {
|
||||
sql.Register("sqlite3", &SQLiteDriver{})
|
||||
}
|
||||
|
||||
type (
|
||||
SQLiteDriver struct {
|
||||
Extensions []string
|
||||
ConnectHook func(*SQLiteConn) error
|
||||
}
|
||||
SQLiteConn struct{}
|
||||
)
|
||||
|
||||
func (SQLiteDriver) Open(s string) (driver.Conn, error) { return nil, errorMsg }
|
||||
func (c *SQLiteConn) RegisterAggregator(string, interface{}, bool) error { return errorMsg }
|
||||
func (c *SQLiteConn) RegisterAuthorizer(func(int, string, string, string) int) {}
|
||||
func (c *SQLiteConn) RegisterCollation(string, func(string, string) int) error { return errorMsg }
|
||||
func (c *SQLiteConn) RegisterCommitHook(func() int) {}
|
||||
func (c *SQLiteConn) RegisterFunc(string, interface{}, bool) error { return errorMsg }
|
||||
func (c *SQLiteConn) RegisterRollbackHook(func()) {}
|
||||
func (c *SQLiteConn) RegisterUpdateHook(func(int, string, string, int64)) {}
|
|
@ -0,0 +1,19 @@
|
|||
# Binaries for programs and plugins
|
||||
*.exe
|
||||
*.exe~
|
||||
*.dll
|
||||
*.so
|
||||
*.dylib
|
||||
|
||||
# Test binary, build with `go test -c`
|
||||
*.test
|
||||
*.pprof
|
||||
|
||||
# Output of the go coverage tool, specifically when used with LiteIDE
|
||||
*.out
|
||||
|
||||
.DS_Store
|
||||
fuzz/corpus
|
||||
fuzz/crashers
|
||||
fuzz/suppressions
|
||||
fuzz/fuzz-fuzz.zip
|
|
@ -0,0 +1,21 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2019 Yusuke Inuzuka
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
|
@ -0,0 +1,16 @@
|
|||
.PHONY: test fuzz
|
||||
|
||||
test:
|
||||
go test -coverprofile=profile.out -coverpkg=github.com/yuin/goldmark,github.com/yuin/goldmark/ast,github.com/yuin/goldmark/extension,github.com/yuin/goldmark/extension/ast,github.com/yuin/goldmark/parser,github.com/yuin/goldmark/renderer,github.com/yuin/goldmark/renderer/html,github.com/yuin/goldmark/text,github.com/yuin/goldmark/util ./...
|
||||
|
||||
cov: test
|
||||
go tool cover -html=profile.out
|
||||
|
||||
fuzz:
|
||||
which go-fuzz > /dev/null 2>&1 || (GO111MODULE=off go get -u github.com/dvyukov/go-fuzz/go-fuzz github.com/dvyukov/go-fuzz/go-fuzz-build; GO111MODULE=off go get -d github.com/dvyukov/go-fuzz-corpus; true)
|
||||
rm -rf ./fuzz/corpus
|
||||
rm -rf ./fuzz/crashers
|
||||
rm -rf ./fuzz/suppressions
|
||||
rm -f ./fuzz/fuzz-fuzz.zip
|
||||
cd ./fuzz && GO111MODULE=off go-fuzz-build
|
||||
cd ./fuzz && go-fuzz
|
|
@ -0,0 +1,498 @@
|
|||
goldmark
|
||||
==========================================
|
||||
|
||||
[![https://pkg.go.dev/github.com/yuin/goldmark](https://pkg.go.dev/badge/github.com/yuin/goldmark.svg)](https://pkg.go.dev/github.com/yuin/goldmark)
|
||||
[![https://github.com/yuin/goldmark/actions?query=workflow:test](https://github.com/yuin/goldmark/workflows/test/badge.svg?branch=master&event=push)](https://github.com/yuin/goldmark/actions?query=workflow:test)
|
||||
[![https://coveralls.io/github/yuin/goldmark](https://coveralls.io/repos/github/yuin/goldmark/badge.svg?branch=master)](https://coveralls.io/github/yuin/goldmark)
|
||||
[![https://goreportcard.com/report/github.com/yuin/goldmark](https://goreportcard.com/badge/github.com/yuin/goldmark)](https://goreportcard.com/report/github.com/yuin/goldmark)
|
||||
|
||||
> A Markdown parser written in Go. Easy to extend, standards-compliant, well-structured.
|
||||
|
||||
goldmark is compliant with CommonMark 0.30.
|
||||
|
||||
Motivation
|
||||
----------------------
|
||||
I needed a Markdown parser for Go that satisfies the following requirements:
|
||||
|
||||
- Easy to extend.
|
||||
- Markdown is poor in document expressions compared to other light markup languages such as reStructuredText.
|
||||
- We have extensions to the Markdown syntax, e.g. PHP Markdown Extra, GitHub Flavored Markdown.
|
||||
- Standards-compliant.
|
||||
- Markdown has many dialects.
|
||||
- GitHub-Flavored Markdown is widely used and is based upon CommonMark, effectively mooting the question of whether or not CommonMark is an ideal specification.
|
||||
- CommonMark is complicated and hard to implement.
|
||||
- Well-structured.
|
||||
- AST-based; preserves source position of nodes.
|
||||
- Written in pure Go.
|
||||
|
||||
[golang-commonmark](https://gitlab.com/golang-commonmark/markdown) may be a good choice, but it seems to be a copy of [markdown-it](https://github.com/markdown-it).
|
||||
|
||||
[blackfriday.v2](https://github.com/russross/blackfriday/tree/v2) is a fast and widely-used implementation, but is not CommonMark-compliant and cannot be extended from outside of the package, since its AST uses structs instead of interfaces.
|
||||
|
||||
Furthermore, its behavior differs from other implementations in some cases, especially regarding lists: [Deep nested lists don't output correctly #329](https://github.com/russross/blackfriday/issues/329), [List block cannot have a second line #244](https://github.com/russross/blackfriday/issues/244), etc.
|
||||
|
||||
This behavior sometimes causes problems. If you migrate your Markdown text from GitHub to blackfriday-based wikis, many lists will immediately be broken.
|
||||
|
||||
As mentioned above, CommonMark is complicated and hard to implement, so Markdown parsers based on CommonMark are few and far between.
|
||||
|
||||
Features
|
||||
----------------------
|
||||
|
||||
- **Standards-compliant.** goldmark is fully compliant with the latest [CommonMark](https://commonmark.org/) specification.
|
||||
- **Extensible.** Do you want to add a `@username` mention syntax to Markdown?
|
||||
You can easily do so in goldmark. You can add your AST nodes,
|
||||
parsers for block-level elements, parsers for inline-level elements,
|
||||
transformers for paragraphs, transformers for the whole AST structure, and
|
||||
renderers.
|
||||
- **Performance.** goldmark's performance is on par with that of cmark,
|
||||
the CommonMark reference implementation written in C.
|
||||
- **Robust.** goldmark is tested with [go-fuzz](https://github.com/dvyukov/go-fuzz), a fuzz testing tool.
|
||||
- **Built-in extensions.** goldmark ships with common extensions like tables, strikethrough,
|
||||
task lists, and definition lists.
|
||||
- **Depends only on standard libraries.**
|
||||
|
||||
Installation
|
||||
----------------------
|
||||
```bash
|
||||
$ go get github.com/yuin/goldmark
|
||||
```
|
||||
|
||||
|
||||
Usage
|
||||
----------------------
|
||||
Import packages:
|
||||
|
||||
```go
|
||||
import (
|
||||
"bytes"
|
||||
"github.com/yuin/goldmark"
|
||||
)
|
||||
```
|
||||
|
||||
|
||||
Convert Markdown documents with the CommonMark-compliant mode:
|
||||
|
||||
```go
|
||||
var buf bytes.Buffer
|
||||
if err := goldmark.Convert(source, &buf); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
```
|
||||
|
||||
With options
|
||||
------------------------------
|
||||
|
||||
```go
|
||||
var buf bytes.Buffer
|
||||
if err := goldmark.Convert(source, &buf, parser.WithContext(ctx)); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
```
|
||||
|
||||
| Functional option | Type | Description |
|
||||
| ----------------- | ---- | ----------- |
|
||||
| `parser.WithContext` | A `parser.Context` | Context for the parsing phase. |
|
||||
|
||||
Context options
|
||||
----------------------
|
||||
|
||||
| Functional option | Type | Description |
|
||||
| ----------------- | ---- | ----------- |
|
||||
| `parser.WithIDs` | A `parser.IDs` | `IDs` lets you customize how element IDs are generated (e.g. auto heading IDs). |
|
||||
|
||||
|
||||
Custom parser and renderer
|
||||
--------------------------
|
||||
```go
|
||||
import (
|
||||
"bytes"
|
||||
"github.com/yuin/goldmark"
|
||||
"github.com/yuin/goldmark/extension"
|
||||
"github.com/yuin/goldmark/parser"
|
||||
"github.com/yuin/goldmark/renderer/html"
|
||||
)
|
||||
|
||||
md := goldmark.New(
|
||||
goldmark.WithExtensions(extension.GFM),
|
||||
goldmark.WithParserOptions(
|
||||
parser.WithAutoHeadingID(),
|
||||
),
|
||||
goldmark.WithRendererOptions(
|
||||
html.WithHardWraps(),
|
||||
html.WithXHTML(),
|
||||
),
|
||||
)
|
||||
var buf bytes.Buffer
|
||||
if err := md.Convert(source, &buf); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
```
|
||||
|
||||
| Functional option | Type | Description |
|
||||
| ----------------- | ---- | ----------- |
|
||||
| `goldmark.WithParser` | `parser.Parser` | This option must be passed before `goldmark.WithParserOptions` and `goldmark.WithExtensions` |
|
||||
| `goldmark.WithRenderer` | `renderer.Renderer` | This option must be passed before `goldmark.WithRendererOptions` and `goldmark.WithExtensions` |
|
||||
| `goldmark.WithParserOptions` | `...parser.Option` | |
|
||||
| `goldmark.WithRendererOptions` | `...renderer.Option` | |
|
||||
| `goldmark.WithExtensions` | `...goldmark.Extender` | |
|
||||
|
||||
Parser and Renderer options
|
||||
------------------------------
|
||||
|
||||
### Parser options
|
||||
|
||||
| Functional option | Type | Description |
|
||||
| ----------------- | ---- | ----------- |
|
||||
| `parser.WithBlockParsers` | A `util.PrioritizedSlice` whose elements are `parser.BlockParser` | Parsers for parsing block level elements. |
|
||||
| `parser.WithInlineParsers` | A `util.PrioritizedSlice` whose elements are `parser.InlineParser` | Parsers for parsing inline level elements. |
|
||||
| `parser.WithParagraphTransformers` | A `util.PrioritizedSlice` whose elements are `parser.ParagraphTransformer` | Transformers for transforming paragraph nodes. |
|
||||
| `parser.WithASTTransformers` | A `util.PrioritizedSlice` whose elements are `parser.ASTTransformer` | Transformers for transforming an AST. |
|
||||
| `parser.WithAutoHeadingID` | `-` | Enables auto heading ids. |
|
||||
| `parser.WithAttribute` | `-` | Enables custom attributes. Currently only headings support attributes. |
|
||||
|
||||
### HTML Renderer options
|
||||
|
||||
| Functional option | Type | Description |
|
||||
| ----------------- | ---- | ----------- |
|
||||
| `html.WithWriter` | `html.Writer` | `html.Writer` for writing contents to an `io.Writer`. |
|
||||
| `html.WithHardWraps` | `-` | Render newlines as `<br>`.|
|
||||
| `html.WithXHTML` | `-` | Render as XHTML. |
|
||||
| `html.WithUnsafe` | `-` | By default, goldmark does not render raw HTML or potentially dangerous links. With this option, goldmark renders such content as written. |
|
||||
|
||||
### Built-in extensions
|
||||
|
||||
- `extension.Table`
|
||||
- [GitHub Flavored Markdown: Tables](https://github.github.com/gfm/#tables-extension-)
|
||||
- `extension.Strikethrough`
|
||||
- [GitHub Flavored Markdown: Strikethrough](https://github.github.com/gfm/#strikethrough-extension-)
|
||||
- `extension.Linkify`
|
||||
- [GitHub Flavored Markdown: Autolinks](https://github.github.com/gfm/#autolinks-extension-)
|
||||
- `extension.TaskList`
|
||||
- [GitHub Flavored Markdown: Task list items](https://github.github.com/gfm/#task-list-items-extension-)
|
||||
- `extension.GFM`
|
||||
- This extension enables Table, Strikethrough, Linkify and TaskList.
|
||||
- This extension does not filter tags defined in [6.11: Disallowed Raw HTML (extension)](https://github.github.com/gfm/#disallowed-raw-html-extension-).
|
||||
If you need to filter HTML tags, see [Security](#security).
|
||||
- If you need to parse github emojis, you can use [goldmark-emoji](https://github.com/yuin/goldmark-emoji) extension.
|
||||
- `extension.DefinitionList`
|
||||
- [PHP Markdown Extra: Definition lists](https://michelf.ca/projects/php-markdown/extra/#def-list)
|
||||
- `extension.Footnote`
|
||||
- [PHP Markdown Extra: Footnotes](https://michelf.ca/projects/php-markdown/extra/#footnotes)
|
||||
- `extension.Typographer`
|
||||
- This extension substitutes punctuation with typographic entities like [smartypants](https://daringfireball.net/projects/smartypants/).
|
||||
|
||||
### Attributes
|
||||
The `parser.WithAttribute` option allows you to define attributes on some elements.
|
||||
|
||||
Currently only headings support attributes.
|
||||
|
||||
**Attributes are being discussed in the
|
||||
[CommonMark forum](https://talk.commonmark.org/t/consistent-attribute-syntax/272).
|
||||
This syntax may possibly change in the future.**
|
||||
|
||||
|
||||
#### Headings
|
||||
|
||||
```
|
||||
## heading ## {#id .className attrName=attrValue class="class1 class2"}
|
||||
|
||||
## heading {#id .className attrName=attrValue class="class1 class2"}
|
||||
```
|
||||
|
||||
```
|
||||
heading {#id .className attrName=attrValue}
|
||||
============
|
||||
```
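Heading attributes take effect only when the `parser.WithAttribute` option is enabled. A minimal sketch (not from the upstream README; the rendered output in the comment is indicative):

```go
import (
	"bytes"

	"github.com/yuin/goldmark"
	"github.com/yuin/goldmark/parser"
)

md := goldmark.New(
	goldmark.WithParserOptions(
		parser.WithAttribute(),     // enable {#id .class key=value} on headings
		parser.WithAutoHeadingID(), // generate ids for headings without an explicit {#id}
	),
)

var buf bytes.Buffer
if err := md.Convert([]byte("## heading {#intro .lead}"), &buf); err != nil {
	panic(err)
}
// buf now holds roughly: <h2 id="intro" class="lead">heading</h2>
```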
|
||||
### Table extension
|
||||
The Table extension implements [Table(extension)](https://github.github.com/gfm/#tables-extension-), as
|
||||
defined in [GitHub Flavored Markdown Spec](https://github.github.com/gfm/).
|
||||
|
||||
The spec is defined for XHTML, so it uses some attributes that are deprecated in HTML5.

You can override the alignment rendering method via options.
|
||||
|
||||
| Functional option | Type | Description |
| ----------------- | ---- | ----------- |
| `extension.WithTableCellAlignMethod` | `extension.TableCellAlignMethod` | Specifies how table cells are aligned. |
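For example (a sketch; `extension.NewTable`, `extension.WithTableCellAlignMethod`, and the `extension.TableCellAlignStyle` constant are assumed here, as provided by recent goldmark versions):

```go
md := goldmark.New(
	goldmark.WithExtensions(
		extension.NewTable(
			// Render cell alignment as a style attribute rather than the
			// deprecated align attribute.
			extension.WithTableCellAlignMethod(extension.TableCellAlignStyle),
		),
	),
)
```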
|
||||
|
||||
### Typographer extension
|
||||
|
||||
The Typographer extension translates plain ASCII punctuation characters into typographic-punctuation HTML entities.
|
||||
|
||||
Default substitutions are:
|
||||
|
||||
| Punctuation | Default entity |
|
||||
| ------------ | ---------- |
|
||||
| `'` | `‘`, `’` |
|
||||
| `"` | `“`, `”` |
|
||||
| `--` | `–` |
|
||||
| `---` | `—` |
|
||||
| `...` | `…` |
|
||||
| `<<` | `«` |
|
||||
| `>>` | `»` |
|
||||
|
||||
You can override the default substitutions via `extension.WithTypographicSubstitutions`:
|
||||
|
||||
```go
|
||||
markdown := goldmark.New(
|
||||
goldmark.WithExtensions(
|
||||
extension.NewTypographer(
|
||||
extension.WithTypographicSubstitutions(extension.TypographicSubstitutions{
|
||||
extension.LeftSingleQuote: []byte("‚"),
|
||||
extension.RightSingleQuote: nil, // nil disables a substitution
|
||||
}),
|
||||
),
|
||||
),
|
||||
)
|
||||
```
|
||||
|
||||
### Linkify extension
|
||||
|
||||
The Linkify extension implements [Autolinks(extension)](https://github.github.com/gfm/#autolinks-extension-), as
|
||||
defined in [GitHub Flavored Markdown Spec](https://github.github.com/gfm/).
|
||||
|
||||
Since the spec does not define details about URLs, there are numerous ambiguous cases.
|
||||
|
||||
You can override autolinking patterns via options.
|
||||
|
||||
| Functional option | Type | Description |
|
||||
| ----------------- | ---- | ----------- |
|
||||
| `extension.WithLinkifyAllowedProtocols` | `[][]byte` | List of allowed protocols such as `[][]byte{ []byte("http:") }` |
|
||||
| `extension.WithLinkifyURLRegexp` | `*regexp.Regexp` | Regexp that defines URLs, including protocols |
|
||||
| `extension.WithLinkifyWWWRegexp` | `*regexp.Regexp` | Regexp that defines URL starting with `www.`. This pattern corresponds to [the extended www autolink](https://github.github.com/gfm/#extended-www-autolink) |
|
||||
| `extension.WithLinkifyEmailRegexp` | `*regexp.Regexp` | Regexp that defines email addresses |
|
||||
|
||||
Example, using [xurls](https://github.com/mvdan/xurls):
|
||||
|
||||
```go
|
||||
import "mvdan.cc/xurls/v2"
|
||||
|
||||
markdown := goldmark.New(
|
||||
goldmark.WithRendererOptions(
|
||||
html.WithXHTML(),
|
||||
html.WithUnsafe(),
|
||||
),
|
||||
goldmark.WithExtensions(
|
||||
extension.NewLinkify(
|
||||
extension.WithLinkifyAllowedProtocols([][]byte{
|
||||
[]byte("http:"),
|
||||
[]byte("https:"),
|
||||
}),
|
||||
extension.WithLinkifyURLRegexp(
|
||||
xurls.Strict,
|
||||
),
|
||||
),
|
||||
),
|
||||
)
|
||||
```
|
||||
|
||||
### Footnotes extension
|
||||
|
||||
The Footnote extension implements [PHP Markdown Extra: Footnotes](https://michelf.ca/projects/php-markdown/extra/#footnotes).
|
||||
|
||||
This extension has some options:
|
||||
|
||||
| Functional option | Type | Description |
|
||||
| ----------------- | ---- | ----------- |
|
||||
| `extension.WithFootnoteIDPrefix` | `[]byte` | a prefix for the id attributes.|
|
||||
| `extension.WithFootnoteIDPrefixFunction` | `func(gast.Node) []byte` | a function that determines the id attribute for given Node.|
|
||||
| `extension.WithFootnoteLinkTitle` | `[]byte` | an optional title attribute for footnote links.|
|
||||
| `extension.WithFootnoteBacklinkTitle` | `[]byte` | an optional title attribute for footnote backlinks. |
|
||||
| `extension.WithFootnoteLinkClass` | `[]byte` | a class for footnote links. This defaults to `footnote-ref`. |
|
||||
| `extension.WithFootnoteBacklinkClass` | `[]byte` | a class for footnote backlinks. This defaults to `footnote-backref`. |
|
||||
| `extension.WithFootnoteBacklinkHTML` | `[]byte` | HTML content for footnote backlinks. This defaults to `↩︎`. |
|
||||
|
||||
Some options can have special substitutions. Occurrences of “^^” in the string will be replaced by the corresponding footnote number in the HTML output. Occurrences of “%%” will be replaced by a number for the reference (footnotes can have multiple references).
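For example, a backlink title that includes the footnote number could be configured like this (a sketch; it assumes the option accepts a plain byte slice, as listed in the table above):

```go
md := goldmark.New(
	goldmark.WithExtensions(
		extension.NewFootnote(
			// "^^" is replaced by the footnote number in the rendered HTML.
			extension.WithFootnoteBacklinkTitle([]byte("back to footnote ^^")),
		),
	),
)
```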
|
||||
|
||||
`extension.WithFootnoteIDPrefix` and `extension.WithFootnoteIDPrefixFunction` are useful if you have multiple Markdown documents displayed inside one HTML document, to prevent footnote IDs from clashing with one another.

`extension.WithFootnoteIDPrefix` sets a fixed id prefix, so you can write code like the following:
|
||||
|
||||
```go
|
||||
for _, path := range files {
|
||||
source := readAll(path)
|
||||
prefix := getPrefix(path)
|
||||
|
||||
markdown := goldmark.New(
|
||||
goldmark.WithExtensions(
|
||||
NewFootnote(
|
||||
WithFootnoteIDPrefix([]byte(path)),
|
||||
),
|
||||
),
|
||||
)
|
||||
var b bytes.Buffer
|
||||
err := markdown.Convert(source, &b)
|
||||
if err != nil {
|
||||
t.Error(err.Error())
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
`extension.WithFootnoteIDPrefixFunction` determines the id prefix by calling the given function, so you can write code like the following:
|
||||
|
||||
```go
|
||||
markdown := goldmark.New(
|
||||
goldmark.WithExtensions(
|
||||
NewFootnote(
|
||||
WithFootnoteIDPrefixFunction(func(n gast.Node) []byte {
|
||||
v, ok := n.OwnerDocument().Meta()["footnote-prefix"]
|
||||
if ok {
|
||||
return util.StringToReadOnlyBytes(v.(string))
|
||||
}
|
||||
return nil
|
||||
}),
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
for _, path := range files {
|
||||
source := readAll(path)
|
||||
var b bytes.Buffer
|
||||
|
||||
doc := markdown.Parser().Parse(text.NewReader(source))
|
||||
doc.Meta()["footnote-prefix"] = getPrefix(path)
|
||||
err := markdown.Renderer().Render(&b, source, doc)
|
||||
}
|
||||
```
|
||||
|
||||
You can use [goldmark-meta](https://github.com/yuin/goldmark-meta) to define an id prefix in the Markdown document:
|
||||
|
||||
|
||||
```markdown
|
||||
---
|
||||
title: document title
|
||||
slug: article1
|
||||
footnote-prefix: article1
|
||||
---
|
||||
|
||||
# My article
|
||||
|
||||
```
|
||||
|
||||
Security
|
||||
--------------------
|
||||
By default, goldmark does not render raw HTML or potentially-dangerous URLs.
If you need more control over untrusted content, it is recommended that you
use an HTML sanitizer such as [bluemonday](https://github.com/microcosm-cc/bluemonday).
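A minimal sketch of that pattern (not from the upstream README; `untrustedSource` is a placeholder for your input):

```go
import (
	"bytes"

	"github.com/microcosm-cc/bluemonday"
	"github.com/yuin/goldmark"
)

var buf bytes.Buffer
if err := goldmark.Convert(untrustedSource, &buf); err != nil {
	panic(err)
}
// Sanitize the rendered HTML before serving it to browsers.
safeHTML := bluemonday.UGCPolicy().SanitizeBytes(buf.Bytes())
_ = safeHTML
```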
|
||||
Benchmark
|
||||
--------------------
|
||||
You can run this benchmark in the `_benchmark` directory.
|
||||
|
||||
### against other golang libraries
|
||||
|
||||
blackfriday v2 seems to be the fastest, but as it is not CommonMark compliant, its performance cannot be directly compared to that of the CommonMark-compliant libraries.
|
||||
|
||||
goldmark, meanwhile, builds a clean, extensible AST structure, achieves full compliance with
|
||||
CommonMark, and consumes less memory, all while being reasonably fast.
|
||||
|
||||
- MBP 2019 13″(i5, 16GB), Go1.17
|
||||
|
||||
```
|
||||
BenchmarkMarkdown/Blackfriday-v2-8 302 3743747 ns/op 3290445 B/op 20050 allocs/op
|
||||
BenchmarkMarkdown/GoldMark-8 280 4200974 ns/op 2559738 B/op 13435 allocs/op
|
||||
BenchmarkMarkdown/CommonMark-8 226 5283686 ns/op 2702490 B/op 20792 allocs/op
|
||||
BenchmarkMarkdown/Lute-8 12 92652857 ns/op 10602649 B/op 40555 allocs/op
|
||||
BenchmarkMarkdown/GoMarkdown-8 13 81380167 ns/op 2245002 B/op 22889 allocs/op
|
||||
```
|
||||
|
||||
### against cmark (CommonMark reference implementation written in C)
|
||||
|
||||
- MBP 2019 13″(i5, 16GB), Go1.17
|
||||
|
||||
```
|
||||
----------- cmark -----------
|
||||
file: _data.md
|
||||
iteration: 50
|
||||
average: 0.0044073057 sec
|
||||
------- goldmark -------
|
||||
file: _data.md
|
||||
iteration: 50
|
||||
average: 0.0041611990 sec
|
||||
```
|
||||
|
||||
As you can see, goldmark's performance is on par with cmark's.
|
||||
|
||||
Extensions
|
||||
--------------------
|
||||
|
||||
- [goldmark-meta](https://github.com/yuin/goldmark-meta): A YAML metadata
|
||||
extension for the goldmark Markdown parser.
|
||||
- [goldmark-highlighting](https://github.com/yuin/goldmark-highlighting): A syntax-highlighting extension
|
||||
for the goldmark markdown parser.
|
||||
- [goldmark-emoji](https://github.com/yuin/goldmark-emoji): An emoji
|
||||
extension for the goldmark Markdown parser.
|
||||
- [goldmark-mathjax](https://github.com/litao91/goldmark-mathjax): Mathjax support for the goldmark markdown parser
|
||||
- [goldmark-pdf](https://github.com/stephenafamo/goldmark-pdf): A PDF renderer that can be passed to `goldmark.WithRenderer()`.
|
||||
- [goldmark-hashtag](https://github.com/abhinav/goldmark-hashtag): Adds support for `#hashtag`-based tagging to goldmark.
|
||||
- [goldmark-wikilink](https://github.com/abhinav/goldmark-wikilink): Adds support for `[[wiki]]`-style links to goldmark.
|
||||
- [goldmark-toc](https://github.com/abhinav/goldmark-toc): Adds support for generating tables-of-contents for goldmark documents.
|
||||
- [goldmark-mermaid](https://github.com/abhinav/goldmark-mermaid): Adds support for rendering [Mermaid](https://mermaid-js.github.io/mermaid/) diagrams in goldmark documents.
|
||||
- [goldmark-pikchr](https://github.com/jchenry/goldmark-pikchr): Adds support for rendering [Pikchr](https://pikchr.org/home/doc/trunk/homepage.md) diagrams in goldmark documents.
|
||||
- [goldmark-embed](https://github.com/13rac1/goldmark-embed): Adds support for rendering embeds from YouTube links.
|
||||
|
||||
|
||||
goldmark internal(for extension developers)
|
||||
----------------------------------------------
|
||||
### Overview
|
||||
goldmark's Markdown processing is outlined in the diagram below.
|
||||
|
||||
```
|
||||
<Markdown in []byte, parser.Context>
|
||||
|
|
||||
V
|
||||
+-------- parser.Parser ---------------------------
|
||||
| 1. Parse block elements into AST
|
||||
| 1. If a parsed block is a paragraph, apply
|
||||
| ast.ParagraphTransformer
|
||||
| 2. Traverse AST and parse blocks.
|
||||
| 1. Process delimiters(emphasis) at the end of
|
||||
| block parsing
|
||||
| 3. Apply parser.ASTTransformers to AST
|
||||
|
|
||||
V
|
||||
<ast.Node>
|
||||
|
|
||||
V
|
||||
+------- renderer.Renderer ------------------------
|
||||
| 1. Traverse AST and apply renderer.NodeRenderer
|
||||
| corespond to the node type
|
||||
|
||||
|
|
||||
V
|
||||
<Output>
|
||||
```
|
||||
|
||||
### Parsing
|
||||
Markdown documents are read through the `text.Reader` interface.

AST nodes do not hold concrete text. Instead, they carry segment information about the source document, represented by `text.Segment`.

`text.Segment` has 3 attributes: `Start`, `End`, and `Padding`.
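As a rough illustration (a sketch, not part of the upstream README; `node` is assumed to be a block-level `ast.Node` and `source` the original input):

```go
// Recover the raw source text covered by a block node's line segments.
var out bytes.Buffer
lines := node.Lines() // *text.Segments
for i := 0; i < lines.Len(); i++ {
	seg := lines.At(i)           // a text.Segment with Start/End/Padding
	out.Write(seg.Value(source)) // the slice of source between Start and End
}
```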
|
||||
|
||||
(TBC)
|
||||
|
||||
**TODO**
|
||||
|
||||
See `extension` directory for examples of extensions.
|
||||
|
||||
Summary:
|
||||
|
||||
1. Define AST Node as a struct in which `ast.BaseBlock` or `ast.BaseInline` is embedded.
2. Write a parser that implements `parser.BlockParser` or `parser.InlineParser`.
3. Write a renderer that implements `renderer.NodeRenderer`.
4. Define your goldmark extension that implements `goldmark.Extender` (a minimal skeleton is sketched below).
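A minimal skeleton of step 4, kept compilable by reusing built-in options (a sketch; a real extension would register its own parsers and node renderers with `util.Prioritized`):

```go
import (
	"github.com/yuin/goldmark"
	"github.com/yuin/goldmark/parser"
	"github.com/yuin/goldmark/renderer/html"
)

// myExtension is a placeholder name for your extension type.
type myExtension struct{}

// Extend wires the extension's parsers and renderers into the Markdown object.
func (e *myExtension) Extend(m goldmark.Markdown) {
	m.Parser().AddOptions(
		parser.WithAutoHeadingID(),
	)
	m.Renderer().AddOptions(
		html.WithHardWraps(),
	)
}

// Usage: md := goldmark.New(goldmark.WithExtensions(&myExtension{}))
```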
|
||||
|
||||
|
||||
Donation
|
||||
--------------------
|
||||
BTC: 1NEDSyUmo4SMTDP83JJQSWi1MvQUGGNMZB
|
||||
|
||||
License
|
||||
--------------------
|
||||
MIT
|
||||
|
||||
Author
|
||||
--------------------
|
||||
Yusuke Inuzuka
|
|
@ -0,0 +1,508 @@
|
|||
// Package ast defines AST nodes that represent markdown elements.
|
||||
package ast
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
textm "github.com/yuin/goldmark/text"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
// A NodeType indicates what type a node belongs to.
|
||||
type NodeType int
|
||||
|
||||
const (
|
||||
// TypeBlock indicates that a node is kind of block nodes.
|
||||
TypeBlock NodeType = iota + 1
|
||||
// TypeInline indicates that a node is kind of inline nodes.
|
||||
TypeInline
|
||||
// TypeDocument indicates that a node is kind of document nodes.
|
||||
TypeDocument
|
||||
)
|
||||
|
||||
// NodeKind indicates more specific type than NodeType.
|
||||
type NodeKind int
|
||||
|
||||
func (k NodeKind) String() string {
|
||||
return kindNames[k]
|
||||
}
|
||||
|
||||
var kindMax NodeKind
|
||||
var kindNames = []string{""}
|
||||
|
||||
// NewNodeKind returns a new Kind value.
|
||||
func NewNodeKind(name string) NodeKind {
|
||||
kindMax++
|
||||
kindNames = append(kindNames, name)
|
||||
return kindMax
|
||||
}
|
||||
|
||||
// An Attribute is an attribute of the Node
|
||||
type Attribute struct {
|
||||
Name []byte
|
||||
Value interface{}
|
||||
}
|
||||
|
||||
// A Node interface defines basic AST node functionalities.
|
||||
type Node interface {
|
||||
// Type returns a type of this node.
|
||||
Type() NodeType
|
||||
|
||||
// Kind returns a kind of this node.
|
||||
Kind() NodeKind
|
||||
|
||||
// NextSibling returns a next sibling node of this node.
|
||||
NextSibling() Node
|
||||
|
||||
// PreviousSibling returns a previous sibling node of this node.
|
||||
PreviousSibling() Node
|
||||
|
||||
// Parent returns a parent node of this node.
|
||||
Parent() Node
|
||||
|
||||
// SetParent sets a parent node to this node.
|
||||
SetParent(Node)
|
||||
|
||||
// SetPreviousSibling sets a previous sibling node to this node.
|
||||
SetPreviousSibling(Node)
|
||||
|
||||
// SetNextSibling sets a next sibling node to this node.
|
||||
SetNextSibling(Node)
|
||||
|
||||
// HasChildren returns true if this node has any children, otherwise false.
|
||||
HasChildren() bool
|
||||
|
||||
// ChildCount returns a total number of children.
|
||||
ChildCount() int
|
||||
|
||||
// FirstChild returns a first child of this node.
|
||||
FirstChild() Node
|
||||
|
||||
// LastChild returns a last child of this node.
|
||||
LastChild() Node
|
||||
|
||||
// AppendChild appends a node child to the tail of the children.
|
||||
AppendChild(self, child Node)
|
||||
|
||||
// RemoveChild removes a node child from this node.
|
||||
// If a node child is not a child of this node, RemoveChild does nothing.
|
||||
RemoveChild(self, child Node)
|
||||
|
||||
// RemoveChildren removes all children from this node.
|
||||
RemoveChildren(self Node)
|
||||
|
||||
// SortChildren sorts children by the given comparator.
|
||||
SortChildren(comparator func(n1, n2 Node) int)
|
||||
|
||||
// ReplaceChild replaces a node v1 with a node insertee.
|
||||
// If v1 is not a child of this node, ReplaceChild appends the insertee to the
|
||||
// tail of the children.
|
||||
ReplaceChild(self, v1, insertee Node)
|
||||
|
||||
// InsertBefore inserts a node insertee before a node v1.
|
||||
// If v1 is not a child of this node, InsertBefore appends the insertee to the
|
||||
// tail of the children.
|
||||
InsertBefore(self, v1, insertee Node)
|
||||
|
||||
// InsertAfter inserts a node insertee after a node v1.
|
||||
// If v1 is not a child of this node, InsertAfter appends the insertee to the
|
||||
// tail of the children.
|
||||
InsertAfter(self, v1, insertee Node)
|
||||
|
||||
// OwnerDocument returns this node's owner document.
|
||||
// If this node is not a child of the Document node, OwnerDocument
|
||||
// returns nil.
|
||||
OwnerDocument() *Document
|
||||
|
||||
// Dump dumps an AST tree structure to stdout.
|
||||
// This function is intended purely for debugging.
|
||||
// level is an indent level. Implementers should indent information with
|
||||
// 2 * level spaces.
|
||||
Dump(source []byte, level int)
|
||||
|
||||
// Text returns text values of this node.
|
||||
Text(source []byte) []byte
|
||||
|
||||
// HasBlankPreviousLines returns true if the row before this node is blank,
|
||||
// otherwise false.
|
||||
// This method is valid only for block nodes.
|
||||
HasBlankPreviousLines() bool
|
||||
|
||||
// SetBlankPreviousLines sets whether the row before this node is blank.
|
||||
// This method is valid only for block nodes.
|
||||
SetBlankPreviousLines(v bool)
|
||||
|
||||
// Lines returns text segments that hold positions in a source.
|
||||
// This method is valid only for block nodes.
|
||||
Lines() *textm.Segments
|
||||
|
||||
// SetLines sets text segments that hold positions in a source.
|
||||
// This method is valid only for block nodes.
|
||||
SetLines(*textm.Segments)
|
||||
|
||||
// IsRaw returns true if contents should be rendered as 'raw' contents.
|
||||
IsRaw() bool
|
||||
|
||||
// SetAttribute sets the given value to the attributes.
|
||||
SetAttribute(name []byte, value interface{})
|
||||
|
||||
// SetAttributeString sets the given value to the attributes.
|
||||
SetAttributeString(name string, value interface{})
|
||||
|
||||
// Attribute returns a (attribute value, true) if an attribute
|
||||
// associated with the given name is found, otherwise
|
||||
// (nil, false)
|
||||
Attribute(name []byte) (interface{}, bool)
|
||||
|
||||
// AttributeString returns a (attribute value, true) if an attribute
|
||||
// associated with the given name is found, otherwise
|
||||
// (nil, false)
|
||||
AttributeString(name string) (interface{}, bool)
|
||||
|
||||
// Attributes returns a list of attributes.
|
||||
// This may be nil if there are no attributes.
|
||||
Attributes() []Attribute
|
||||
|
||||
// RemoveAttributes removes all attributes from this node.
|
||||
RemoveAttributes()
|
||||
}
|
||||
|
||||
// A BaseNode struct implements the Node interface partially.
|
||||
type BaseNode struct {
|
||||
firstChild Node
|
||||
lastChild Node
|
||||
parent Node
|
||||
next Node
|
||||
prev Node
|
||||
childCount int
|
||||
attributes []Attribute
|
||||
}
|
||||
|
||||
func ensureIsolated(v Node) {
|
||||
if p := v.Parent(); p != nil {
|
||||
p.RemoveChild(p, v)
|
||||
}
|
||||
}
|
||||
|
||||
// HasChildren implements Node.HasChildren .
|
||||
func (n *BaseNode) HasChildren() bool {
|
||||
return n.firstChild != nil
|
||||
}
|
||||
|
||||
// SetPreviousSibling implements Node.SetPreviousSibling .
|
||||
func (n *BaseNode) SetPreviousSibling(v Node) {
|
||||
n.prev = v
|
||||
}
|
||||
|
||||
// SetNextSibling implements Node.SetNextSibling .
|
||||
func (n *BaseNode) SetNextSibling(v Node) {
|
||||
n.next = v
|
||||
}
|
||||
|
||||
// PreviousSibling implements Node.PreviousSibling .
|
||||
func (n *BaseNode) PreviousSibling() Node {
|
||||
return n.prev
|
||||
}
|
||||
|
||||
// NextSibling implements Node.NextSibling .
|
||||
func (n *BaseNode) NextSibling() Node {
|
||||
return n.next
|
||||
}
|
||||
|
||||
// RemoveChild implements Node.RemoveChild .
|
||||
func (n *BaseNode) RemoveChild(self, v Node) {
|
||||
if v.Parent() != self {
|
||||
return
|
||||
}
|
||||
n.childCount--
|
||||
prev := v.PreviousSibling()
|
||||
next := v.NextSibling()
|
||||
if prev != nil {
|
||||
prev.SetNextSibling(next)
|
||||
} else {
|
||||
n.firstChild = next
|
||||
}
|
||||
if next != nil {
|
||||
next.SetPreviousSibling(prev)
|
||||
} else {
|
||||
n.lastChild = prev
|
||||
}
|
||||
v.SetParent(nil)
|
||||
v.SetPreviousSibling(nil)
|
||||
v.SetNextSibling(nil)
|
||||
}
|
||||
|
||||
// RemoveChildren implements Node.RemoveChildren .
|
||||
func (n *BaseNode) RemoveChildren(self Node) {
|
||||
for c := n.firstChild; c != nil; {
|
||||
c.SetParent(nil)
|
||||
c.SetPreviousSibling(nil)
|
||||
next := c.NextSibling()
|
||||
c.SetNextSibling(nil)
|
||||
c = next
|
||||
}
|
||||
n.firstChild = nil
|
||||
n.lastChild = nil
|
||||
n.childCount = 0
|
||||
}
|
||||
|
||||
// SortChildren implements Node.SortChildren
|
||||
func (n *BaseNode) SortChildren(comparator func(n1, n2 Node) int) {
|
||||
var sorted Node
|
||||
current := n.firstChild
|
||||
for current != nil {
|
||||
next := current.NextSibling()
|
||||
if sorted == nil || comparator(sorted, current) >= 0 {
|
||||
current.SetNextSibling(sorted)
|
||||
if sorted != nil {
|
||||
sorted.SetPreviousSibling(current)
|
||||
}
|
||||
sorted = current
|
||||
sorted.SetPreviousSibling(nil)
|
||||
} else {
|
||||
c := sorted
|
||||
for c.NextSibling() != nil && comparator(c.NextSibling(), current) < 0 {
|
||||
c = c.NextSibling()
|
||||
}
|
||||
current.SetNextSibling(c.NextSibling())
|
||||
current.SetPreviousSibling(c)
|
||||
if c.NextSibling() != nil {
|
||||
c.NextSibling().SetPreviousSibling(current)
|
||||
}
|
||||
c.SetNextSibling(current)
|
||||
}
|
||||
current = next
|
||||
}
|
||||
n.firstChild = sorted
|
||||
for c := n.firstChild; c != nil; c = c.NextSibling() {
|
||||
n.lastChild = c
|
||||
}
|
||||
}
|
||||
|
||||
// FirstChild implements Node.FirstChild .
|
||||
func (n *BaseNode) FirstChild() Node {
|
||||
return n.firstChild
|
||||
}
|
||||
|
||||
// LastChild implements Node.LastChild .
|
||||
func (n *BaseNode) LastChild() Node {
|
||||
return n.lastChild
|
||||
}
|
||||
|
||||
// ChildCount implements Node.ChildCount .
|
||||
func (n *BaseNode) ChildCount() int {
|
||||
return n.childCount
|
||||
}
|
||||
|
||||
// Parent implements Node.Parent .
|
||||
func (n *BaseNode) Parent() Node {
|
||||
return n.parent
|
||||
}
|
||||
|
||||
// SetParent implements Node.SetParent .
|
||||
func (n *BaseNode) SetParent(v Node) {
|
||||
n.parent = v
|
||||
}
|
||||
|
||||
// AppendChild implements Node.AppendChild .
|
||||
func (n *BaseNode) AppendChild(self, v Node) {
|
||||
ensureIsolated(v)
|
||||
if n.firstChild == nil {
|
||||
n.firstChild = v
|
||||
v.SetNextSibling(nil)
|
||||
v.SetPreviousSibling(nil)
|
||||
} else {
|
||||
last := n.lastChild
|
||||
last.SetNextSibling(v)
|
||||
v.SetPreviousSibling(last)
|
||||
}
|
||||
v.SetParent(self)
|
||||
n.lastChild = v
|
||||
n.childCount++
|
||||
}
|
||||
|
||||
// ReplaceChild implements Node.ReplaceChild .
|
||||
func (n *BaseNode) ReplaceChild(self, v1, insertee Node) {
|
||||
n.InsertBefore(self, v1, insertee)
|
||||
n.RemoveChild(self, v1)
|
||||
}
|
||||
|
||||
// InsertAfter implements Node.InsertAfter .
|
||||
func (n *BaseNode) InsertAfter(self, v1, insertee Node) {
|
||||
n.InsertBefore(self, v1.NextSibling(), insertee)
|
||||
}
|
||||
|
||||
// InsertBefore implements Node.InsertBefore .
|
||||
func (n *BaseNode) InsertBefore(self, v1, insertee Node) {
|
||||
n.childCount++
|
||||
if v1 == nil {
|
||||
n.AppendChild(self, insertee)
|
||||
return
|
||||
}
|
||||
ensureIsolated(insertee)
|
||||
if v1.Parent() == self {
|
||||
c := v1
|
||||
prev := c.PreviousSibling()
|
||||
if prev != nil {
|
||||
prev.SetNextSibling(insertee)
|
||||
insertee.SetPreviousSibling(prev)
|
||||
} else {
|
||||
n.firstChild = insertee
|
||||
insertee.SetPreviousSibling(nil)
|
||||
}
|
||||
insertee.SetNextSibling(c)
|
||||
c.SetPreviousSibling(insertee)
|
||||
insertee.SetParent(self)
|
||||
}
|
||||
}
|
||||
|
||||
// OwnerDocument implements Node.OwnerDocument
|
||||
func (n *BaseNode) OwnerDocument() *Document {
|
||||
d := n.Parent()
|
||||
for {
|
||||
p := d.Parent()
|
||||
if p == nil {
|
||||
if v, ok := d.(*Document); ok {
|
||||
return v
|
||||
}
|
||||
break
|
||||
}
|
||||
d = p
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Text implements Node.Text .
|
||||
func (n *BaseNode) Text(source []byte) []byte {
|
||||
var buf bytes.Buffer
|
||||
for c := n.firstChild; c != nil; c = c.NextSibling() {
|
||||
buf.Write(c.Text(source))
|
||||
}
|
||||
return buf.Bytes()
|
||||
}
|
||||
|
||||
// SetAttribute implements Node.SetAttribute.
|
||||
func (n *BaseNode) SetAttribute(name []byte, value interface{}) {
|
||||
if n.attributes == nil {
|
||||
n.attributes = make([]Attribute, 0, 10)
|
||||
} else {
|
||||
for i, a := range n.attributes {
|
||||
if bytes.Equal(a.Name, name) {
|
||||
n.attributes[i].Name = name
|
||||
n.attributes[i].Value = value
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
n.attributes = append(n.attributes, Attribute{name, value})
|
||||
}
|
||||
|
||||
// SetAttributeString implements Node.SetAttributeString
|
||||
func (n *BaseNode) SetAttributeString(name string, value interface{}) {
|
||||
n.SetAttribute(util.StringToReadOnlyBytes(name), value)
|
||||
}
|
||||
|
||||
// Attribute implements Node.Attribute.
|
||||
func (n *BaseNode) Attribute(name []byte) (interface{}, bool) {
|
||||
if n.attributes == nil {
|
||||
return nil, false
|
||||
}
|
||||
for i, a := range n.attributes {
|
||||
if bytes.Equal(a.Name, name) {
|
||||
return n.attributes[i].Value, true
|
||||
}
|
||||
}
|
||||
return nil, false
|
||||
}
|
||||
|
||||
// AttributeString implements Node.AttributeString.
|
||||
func (n *BaseNode) AttributeString(s string) (interface{}, bool) {
|
||||
return n.Attribute(util.StringToReadOnlyBytes(s))
|
||||
}
|
||||
|
||||
// Attributes implements Node.Attributes
|
||||
func (n *BaseNode) Attributes() []Attribute {
|
||||
return n.attributes
|
||||
}
|
||||
|
||||
// RemoveAttributes implements Node.RemoveAttributes
|
||||
func (n *BaseNode) RemoveAttributes() {
|
||||
n.attributes = nil
|
||||
}
|
||||
|
||||
// DumpHelper is a helper function to implement Node.Dump.
|
||||
// kv is a map of attribute names to attribute values.
|
||||
// cb is a function called after the name and attributes have been written.
|
||||
func DumpHelper(v Node, source []byte, level int, kv map[string]string, cb func(int)) {
|
||||
name := v.Kind().String()
|
||||
indent := strings.Repeat(" ", level)
|
||||
fmt.Printf("%s%s {\n", indent, name)
|
||||
indent2 := strings.Repeat(" ", level+1)
|
||||
if v.Type() == TypeBlock {
|
||||
fmt.Printf("%sRawText: \"", indent2)
|
||||
for i := 0; i < v.Lines().Len(); i++ {
|
||||
line := v.Lines().At(i)
|
||||
fmt.Printf("%s", line.Value(source))
|
||||
}
|
||||
fmt.Printf("\"\n")
|
||||
fmt.Printf("%sHasBlankPreviousLines: %v\n", indent2, v.HasBlankPreviousLines())
|
||||
}
|
||||
for name, value := range kv {
|
||||
fmt.Printf("%s%s: %s\n", indent2, name, value)
|
||||
}
|
||||
if cb != nil {
|
||||
cb(level + 1)
|
||||
}
|
||||
for c := v.FirstChild(); c != nil; c = c.NextSibling() {
|
||||
c.Dump(source, level+1)
|
||||
}
|
||||
fmt.Printf("%s}\n", indent)
|
||||
}
|
||||
|
||||
// WalkStatus represents a current status of the Walk function.
|
||||
type WalkStatus int
|
||||
|
||||
const (
|
||||
// WalkStop indicates no more walking needed.
|
||||
WalkStop WalkStatus = iota + 1
|
||||
|
||||
// WalkSkipChildren indicates that Walk won't walk on children of the current
|
||||
// node.
|
||||
WalkSkipChildren
|
||||
|
||||
// WalkContinue indicates that Walk can continue to walk.
|
||||
WalkContinue
|
||||
)
|
||||
|
||||
// Walker is a function that will be called when Walk finds a
|
||||
// new node.
|
||||
// entering is set to true before walking children and false after walking them.
|
||||
// If Walker returns an error, Walk immediately stops walking.
|
||||
type Walker func(n Node, entering bool) (WalkStatus, error)
|
||||
|
||||
// Walk walks an AST tree by the depth-first search algorithm.
|
||||
func Walk(n Node, walker Walker) error {
|
||||
_, err := walkHelper(n, walker)
|
||||
return err
|
||||
}
|
||||
|
||||
func walkHelper(n Node, walker Walker) (WalkStatus, error) {
|
||||
status, err := walker(n, true)
|
||||
if err != nil || status == WalkStop {
|
||||
return status, err
|
||||
}
|
||||
if status != WalkSkipChildren {
|
||||
for c := n.FirstChild(); c != nil; c = c.NextSibling() {
|
||||
if st, err := walkHelper(c, walker); err != nil || st == WalkStop {
|
||||
return WalkStop, err
|
||||
}
|
||||
}
|
||||
}
|
||||
status, err = walker(n, false)
|
||||
if err != nil || status == WalkStop {
|
||||
return WalkStop, err
|
||||
}
|
||||
return WalkContinue, nil
|
||||
}
|
|
@ -0,0 +1,508 @@
|
|||
package ast
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
textm "github.com/yuin/goldmark/text"
|
||||
)
|
||||
|
||||
// A BaseBlock struct implements the Node interface partially.
|
||||
type BaseBlock struct {
|
||||
BaseNode
|
||||
blankPreviousLines bool
|
||||
lines *textm.Segments
|
||||
}
|
||||
|
||||
// Type implements Node.Type
|
||||
func (b *BaseBlock) Type() NodeType {
|
||||
return TypeBlock
|
||||
}
|
||||
|
||||
// IsRaw implements Node.IsRaw
|
||||
func (b *BaseBlock) IsRaw() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
// HasBlankPreviousLines implements Node.HasBlankPreviousLines.
|
||||
func (b *BaseBlock) HasBlankPreviousLines() bool {
|
||||
return b.blankPreviousLines
|
||||
}
|
||||
|
||||
// SetBlankPreviousLines implements Node.SetBlankPreviousLines.
|
||||
func (b *BaseBlock) SetBlankPreviousLines(v bool) {
|
||||
b.blankPreviousLines = v
|
||||
}
|
||||
|
||||
// Lines implements Node.Lines
|
||||
func (b *BaseBlock) Lines() *textm.Segments {
|
||||
if b.lines == nil {
|
||||
b.lines = textm.NewSegments()
|
||||
}
|
||||
return b.lines
|
||||
}
|
||||
|
||||
// SetLines implements Node.SetLines
|
||||
func (b *BaseBlock) SetLines(v *textm.Segments) {
|
||||
b.lines = v
|
||||
}
|
||||
|
||||
// A Document struct is a root node of Markdown text.
|
||||
type Document struct {
|
||||
BaseBlock
|
||||
|
||||
meta map[string]interface{}
|
||||
}
|
||||
|
||||
// KindDocument is a NodeKind of the Document node.
|
||||
var KindDocument = NewNodeKind("Document")
|
||||
|
||||
// Dump implements Node.Dump .
|
||||
func (n *Document) Dump(source []byte, level int) {
|
||||
DumpHelper(n, source, level, nil, nil)
|
||||
}
|
||||
|
||||
// Type implements Node.Type .
|
||||
func (n *Document) Type() NodeType {
|
||||
return TypeDocument
|
||||
}
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *Document) Kind() NodeKind {
|
||||
return KindDocument
|
||||
}
|
||||
|
||||
// OwnerDocument implements Node.OwnerDocument
|
||||
func (n *Document) OwnerDocument() *Document {
|
||||
return n
|
||||
}
|
||||
|
||||
// Meta returns metadata of this document.
|
||||
func (n *Document) Meta() map[string]interface{} {
|
||||
if n.meta == nil {
|
||||
n.meta = map[string]interface{}{}
|
||||
}
|
||||
return n.meta
|
||||
}
|
||||
|
||||
// SetMeta sets given metadata to this document.
|
||||
func (n *Document) SetMeta(meta map[string]interface{}) {
|
||||
if n.meta == nil {
|
||||
n.meta = map[string]interface{}{}
|
||||
}
|
||||
for k, v := range meta {
|
||||
n.meta[k] = v
|
||||
}
|
||||
}
|
||||
|
||||
// AddMeta adds given metadata to this document.
|
||||
func (n *Document) AddMeta(key string, value interface{}) {
|
||||
if n.meta == nil {
|
||||
n.meta = map[string]interface{}{}
|
||||
}
|
||||
n.meta[key] = value
|
||||
}
|
||||
|
||||
// NewDocument returns a new Document node.
|
||||
func NewDocument() *Document {
|
||||
return &Document{
|
||||
BaseBlock: BaseBlock{},
|
||||
meta: nil,
|
||||
}
|
||||
}
|
||||
|
||||
// A TextBlock struct is a node whose lines
|
||||
// should be rendered without any containers.
|
||||
type TextBlock struct {
|
||||
BaseBlock
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump .
|
||||
func (n *TextBlock) Dump(source []byte, level int) {
|
||||
DumpHelper(n, source, level, nil, nil)
|
||||
}
|
||||
|
||||
// KindTextBlock is a NodeKind of the TextBlock node.
|
||||
var KindTextBlock = NewNodeKind("TextBlock")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *TextBlock) Kind() NodeKind {
|
||||
return KindTextBlock
|
||||
}
|
||||
|
||||
// NewTextBlock returns a new TextBlock node.
|
||||
func NewTextBlock() *TextBlock {
|
||||
return &TextBlock{
|
||||
BaseBlock: BaseBlock{},
|
||||
}
|
||||
}
|
||||
|
||||
// A Paragraph struct represents a paragraph of Markdown text.
|
||||
type Paragraph struct {
|
||||
BaseBlock
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump .
|
||||
func (n *Paragraph) Dump(source []byte, level int) {
|
||||
DumpHelper(n, source, level, nil, nil)
|
||||
}
|
||||
|
||||
// KindParagraph is a NodeKind of the Paragraph node.
|
||||
var KindParagraph = NewNodeKind("Paragraph")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *Paragraph) Kind() NodeKind {
|
||||
return KindParagraph
|
||||
}
|
||||
|
||||
// NewParagraph returns a new Paragraph node.
|
||||
func NewParagraph() *Paragraph {
|
||||
return &Paragraph{
|
||||
BaseBlock: BaseBlock{},
|
||||
}
|
||||
}
|
||||
|
||||
// IsParagraph returns true if the given node is a Paragraph node,
|
||||
// otherwise false.
|
||||
func IsParagraph(node Node) bool {
|
||||
_, ok := node.(*Paragraph)
|
||||
return ok
|
||||
}
|
||||
|
||||
// A Heading struct represents headings like SetextHeading and ATXHeading.
|
||||
type Heading struct {
|
||||
BaseBlock
|
||||
// Level returns a level of this heading.
|
||||
// This value is between 1 and 6.
|
||||
Level int
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump .
|
||||
func (n *Heading) Dump(source []byte, level int) {
|
||||
m := map[string]string{
|
||||
"Level": fmt.Sprintf("%d", n.Level),
|
||||
}
|
||||
DumpHelper(n, source, level, m, nil)
|
||||
}
|
||||
|
||||
// KindHeading is a NodeKind of the Heading node.
|
||||
var KindHeading = NewNodeKind("Heading")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *Heading) Kind() NodeKind {
|
||||
return KindHeading
|
||||
}
|
||||
|
||||
// NewHeading returns a new Heading node.
|
||||
func NewHeading(level int) *Heading {
|
||||
return &Heading{
|
||||
BaseBlock: BaseBlock{},
|
||||
Level: level,
|
||||
}
|
||||
}
|
||||
|
||||
// A ThematicBreak struct represents a thematic break of Markdown text.
|
||||
type ThematicBreak struct {
|
||||
BaseBlock
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump .
|
||||
func (n *ThematicBreak) Dump(source []byte, level int) {
|
||||
DumpHelper(n, source, level, nil, nil)
|
||||
}
|
||||
|
||||
// KindThematicBreak is a NodeKind of the ThematicBreak node.
|
||||
var KindThematicBreak = NewNodeKind("ThematicBreak")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *ThematicBreak) Kind() NodeKind {
|
||||
return KindThematicBreak
|
||||
}
|
||||
|
||||
// NewThematicBreak returns a new ThematicBreak node.
|
||||
func NewThematicBreak() *ThematicBreak {
|
||||
return &ThematicBreak{
|
||||
BaseBlock: BaseBlock{},
|
||||
}
|
||||
}
|
||||
|
||||
// A CodeBlock struct represents an indented code block of Markdown text.
|
||||
type CodeBlock struct {
|
||||
BaseBlock
|
||||
}
|
||||
|
||||
// IsRaw implements Node.IsRaw.
|
||||
func (n *CodeBlock) IsRaw() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump .
|
||||
func (n *CodeBlock) Dump(source []byte, level int) {
|
||||
DumpHelper(n, source, level, nil, nil)
|
||||
}
|
||||
|
||||
// KindCodeBlock is a NodeKind of the CodeBlock node.
|
||||
var KindCodeBlock = NewNodeKind("CodeBlock")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *CodeBlock) Kind() NodeKind {
|
||||
return KindCodeBlock
|
||||
}
|
||||
|
||||
// NewCodeBlock returns a new CodeBlock node.
|
||||
func NewCodeBlock() *CodeBlock {
|
||||
return &CodeBlock{
|
||||
BaseBlock: BaseBlock{},
|
||||
}
|
||||
}
|
||||
|
||||
// A FencedCodeBlock struct represents a fenced code block of Markdown text.
|
||||
type FencedCodeBlock struct {
|
||||
BaseBlock
|
||||
// Info returns the info text of this fenced code block.
|
||||
Info *Text
|
||||
|
||||
language []byte
|
||||
}
|
||||
|
||||
// Language returns the language in the info string.
|
||||
// Language returns nil if this node does not have an info string.
|
||||
func (n *FencedCodeBlock) Language(source []byte) []byte {
|
||||
if n.language == nil && n.Info != nil {
|
||||
segment := n.Info.Segment
|
||||
info := segment.Value(source)
|
||||
i := 0
|
||||
for ; i < len(info); i++ {
|
||||
if info[i] == ' ' {
|
||||
break
|
||||
}
|
||||
}
|
||||
n.language = info[:i]
|
||||
}
|
||||
return n.language
|
||||
}
|
||||
|
||||
// IsRaw implements Node.IsRaw.
|
||||
func (n *FencedCodeBlock) IsRaw() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump .
|
||||
func (n *FencedCodeBlock) Dump(source []byte, level int) {
|
||||
m := map[string]string{}
|
||||
if n.Info != nil {
|
||||
m["Info"] = fmt.Sprintf("\"%s\"", n.Info.Text(source))
|
||||
}
|
||||
DumpHelper(n, source, level, m, nil)
|
||||
}
|
||||
|
||||
// KindFencedCodeBlock is a NodeKind of the FencedCodeBlock node.
|
||||
var KindFencedCodeBlock = NewNodeKind("FencedCodeBlock")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *FencedCodeBlock) Kind() NodeKind {
|
||||
return KindFencedCodeBlock
|
||||
}
|
||||
|
||||
// NewFencedCodeBlock returns a new FencedCodeBlock node.
|
||||
func NewFencedCodeBlock(info *Text) *FencedCodeBlock {
|
||||
return &FencedCodeBlock{
|
||||
BaseBlock: BaseBlock{},
|
||||
Info: info,
|
||||
}
|
||||
}
|
||||
|
||||
// A Blockquote struct represents a blockquote block of Markdown text.
|
||||
type Blockquote struct {
|
||||
BaseBlock
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump .
|
||||
func (n *Blockquote) Dump(source []byte, level int) {
|
||||
DumpHelper(n, source, level, nil, nil)
|
||||
}
|
||||
|
||||
// KindBlockquote is a NodeKind of the Blockquote node.
|
||||
var KindBlockquote = NewNodeKind("Blockquote")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *Blockquote) Kind() NodeKind {
|
||||
return KindBlockquote
|
||||
}
|
||||
|
||||
// NewBlockquote returns a new Blockquote node.
|
||||
func NewBlockquote() *Blockquote {
|
||||
return &Blockquote{
|
||||
BaseBlock: BaseBlock{},
|
||||
}
|
||||
}
|
||||
|
||||
// A List struct represents a list of Markdown text.
|
||||
type List struct {
|
||||
BaseBlock
|
||||
|
||||
// Marker is a marker character like '-', '+', ')' and '.'.
|
||||
Marker byte
|
||||
|
||||
// IsTight is true if this list is a 'tight' list.
|
||||
// See https://spec.commonmark.org/0.30/#loose for details.
|
||||
IsTight bool
|
||||
|
||||
// Start is an initial number of this ordered list.
|
||||
// If this list is not an ordered list, Start is 0.
|
||||
Start int
|
||||
}
|
||||
|
||||
// IsOrdered returns true if this list is an ordered list, otherwise false.
|
||||
func (l *List) IsOrdered() bool {
|
||||
return l.Marker == '.' || l.Marker == ')'
|
||||
}
|
||||
|
||||
// CanContinue returns true if this list can continue with
|
||||
// the given marker and list type, otherwise false.
|
||||
func (l *List) CanContinue(marker byte, isOrdered bool) bool {
|
||||
return marker == l.Marker && isOrdered == l.IsOrdered()
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (l *List) Dump(source []byte, level int) {
|
||||
m := map[string]string{
|
||||
"Ordered": fmt.Sprintf("%v", l.IsOrdered()),
|
||||
"Marker": fmt.Sprintf("%c", l.Marker),
|
||||
"Tight": fmt.Sprintf("%v", l.IsTight),
|
||||
}
|
||||
if l.IsOrdered() {
|
||||
m["Start"] = fmt.Sprintf("%d", l.Start)
|
||||
}
|
||||
DumpHelper(l, source, level, m, nil)
|
||||
}
|
||||
|
||||
// KindList is a NodeKind of the List node.
|
||||
var KindList = NewNodeKind("List")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (l *List) Kind() NodeKind {
|
||||
return KindList
|
||||
}
|
||||
|
||||
// NewList returns a new List node.
|
||||
func NewList(marker byte) *List {
|
||||
return &List{
|
||||
BaseBlock: BaseBlock{},
|
||||
Marker: marker,
|
||||
IsTight: true,
|
||||
}
|
||||
}
|
||||
|
||||
// A ListItem struct represents a list item of Markdown text.
|
||||
type ListItem struct {
|
||||
BaseBlock
|
||||
|
||||
// Offset is an offset position of this item.
|
||||
Offset int
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *ListItem) Dump(source []byte, level int) {
|
||||
m := map[string]string{
|
||||
"Offset": fmt.Sprintf("%d", n.Offset),
|
||||
}
|
||||
DumpHelper(n, source, level, m, nil)
|
||||
}
|
||||
|
||||
// KindListItem is a NodeKind of the ListItem node.
|
||||
var KindListItem = NewNodeKind("ListItem")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *ListItem) Kind() NodeKind {
|
||||
return KindListItem
|
||||
}
|
||||
|
||||
// NewListItem returns a new ListItem node.
|
||||
func NewListItem(offset int) *ListItem {
|
||||
return &ListItem{
|
||||
BaseBlock: BaseBlock{},
|
||||
Offset: offset,
|
||||
}
|
||||
}
|
||||
|
||||
// HTMLBlockType represents the kind of an HTML block.
|
||||
// See https://spec.commonmark.org/0.30/#html-blocks
|
||||
type HTMLBlockType int
|
||||
|
||||
const (
|
||||
// HTMLBlockType1 represents type 1 html blocks
|
||||
HTMLBlockType1 HTMLBlockType = iota + 1
|
||||
// HTMLBlockType2 represents type 2 html blocks
|
||||
HTMLBlockType2
|
||||
// HTMLBlockType3 represents type 3 html blocks
|
||||
HTMLBlockType3
|
||||
// HTMLBlockType4 represents type 4 html blocks
|
||||
HTMLBlockType4
|
||||
// HTMLBlockType5 represents type 5 html blocks
|
||||
HTMLBlockType5
|
||||
// HTMLBlockType6 represents type 6 html blocks
|
||||
HTMLBlockType6
|
||||
// HTMLBlockType7 represents type 7 html blocks
|
||||
HTMLBlockType7
|
||||
)
|
||||
|
||||
// An HTMLBlock struct represents an html block of Markdown text.
|
||||
type HTMLBlock struct {
|
||||
BaseBlock
|
||||
|
||||
// Type is a type of this html block.
|
||||
HTMLBlockType HTMLBlockType
|
||||
|
||||
// ClosureLine is a line that closes this html block.
|
||||
ClosureLine textm.Segment
|
||||
}
|
||||
|
||||
// IsRaw implements Node.IsRaw.
|
||||
func (n *HTMLBlock) IsRaw() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
// HasClosure returns true if this html block has a closure line,
|
||||
// otherwise false.
|
||||
func (n *HTMLBlock) HasClosure() bool {
|
||||
return n.ClosureLine.Start >= 0
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *HTMLBlock) Dump(source []byte, level int) {
|
||||
indent := strings.Repeat(" ", level)
|
||||
fmt.Printf("%s%s {\n", indent, "HTMLBlock")
|
||||
indent2 := strings.Repeat(" ", level+1)
|
||||
fmt.Printf("%sRawText: \"", indent2)
|
||||
for i := 0; i < n.Lines().Len(); i++ {
|
||||
s := n.Lines().At(i)
|
||||
fmt.Print(string(source[s.Start:s.Stop]))
|
||||
}
|
||||
fmt.Printf("\"\n")
|
||||
for c := n.FirstChild(); c != nil; c = c.NextSibling() {
|
||||
c.Dump(source, level+1)
|
||||
}
|
||||
if n.HasClosure() {
|
||||
cl := n.ClosureLine
|
||||
fmt.Printf("%sClosure: \"%s\"\n", indent2, string(cl.Value(source)))
|
||||
}
|
||||
fmt.Printf("%s}\n", indent)
|
||||
}
|
||||
|
||||
// KindHTMLBlock is a NodeKind of the HTMLBlock node.
|
||||
var KindHTMLBlock = NewNodeKind("HTMLBlock")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *HTMLBlock) Kind() NodeKind {
|
||||
return KindHTMLBlock
|
||||
}
|
||||
|
||||
// NewHTMLBlock returns a new HTMLBlock node.
|
||||
func NewHTMLBlock(typ HTMLBlockType) *HTMLBlock {
|
||||
return &HTMLBlock{
|
||||
BaseBlock: BaseBlock{},
|
||||
HTMLBlockType: typ,
|
||||
ClosureLine: textm.NewSegment(-1, -1),
|
||||
}
|
||||
}
|
|
@ -0,0 +1,548 @@
|
|||
package ast
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
textm "github.com/yuin/goldmark/text"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
// A BaseInline struct implements the Node interface partially.
|
||||
type BaseInline struct {
|
||||
BaseNode
|
||||
}
|
||||
|
||||
// Type implements Node.Type
|
||||
func (b *BaseInline) Type() NodeType {
|
||||
return TypeInline
|
||||
}
|
||||
|
||||
// IsRaw implements Node.IsRaw
|
||||
func (b *BaseInline) IsRaw() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
// HasBlankPreviousLines implements Node.HasBlankPreviousLines.
|
||||
func (b *BaseInline) HasBlankPreviousLines() bool {
|
||||
panic("can not call with inline nodes.")
|
||||
}
|
||||
|
||||
// SetBlankPreviousLines implements Node.SetBlankPreviousLines.
|
||||
func (b *BaseInline) SetBlankPreviousLines(v bool) {
|
||||
panic("can not call with inline nodes.")
|
||||
}
|
||||
|
||||
// Lines implements Node.Lines
|
||||
func (b *BaseInline) Lines() *textm.Segments {
|
||||
panic("can not call with inline nodes.")
|
||||
}
|
||||
|
||||
// SetLines implements Node.SetLines
|
||||
func (b *BaseInline) SetLines(v *textm.Segments) {
|
||||
panic("can not call with inline nodes.")
|
||||
}
|
||||
|
||||
// A Text struct represents a textual content of the Markdown text.
|
||||
type Text struct {
|
||||
BaseInline
|
||||
// Segment is a position in a source text.
|
||||
Segment textm.Segment
|
||||
|
||||
flags uint8
|
||||
}
|
||||
|
||||
const (
|
||||
textSoftLineBreak = 1 << iota
|
||||
textHardLineBreak
|
||||
textRaw
|
||||
textCode
|
||||
)
|
||||
|
||||
func textFlagsString(flags uint8) string {
|
||||
buf := []string{}
|
||||
if flags&textSoftLineBreak != 0 {
|
||||
buf = append(buf, "SoftLineBreak")
|
||||
}
|
||||
if flags&textHardLineBreak != 0 {
|
||||
buf = append(buf, "HardLineBreak")
|
||||
}
|
||||
if flags&textRaw != 0 {
|
||||
buf = append(buf, "Raw")
|
||||
}
|
||||
if flags&textCode != 0 {
|
||||
buf = append(buf, "Code")
|
||||
}
|
||||
return strings.Join(buf, ", ")
|
||||
}
|
||||
|
||||
// Inline implements Inline.Inline.
|
||||
func (n *Text) Inline() {
|
||||
}
|
||||
|
||||
// SoftLineBreak returns true if this node ends with a new line,
|
||||
// otherwise false.
|
||||
func (n *Text) SoftLineBreak() bool {
|
||||
return n.flags&textSoftLineBreak != 0
|
||||
}
|
||||
|
||||
// SetSoftLineBreak sets whether this node ends with a new line.
|
||||
func (n *Text) SetSoftLineBreak(v bool) {
|
||||
if v {
|
||||
n.flags |= textSoftLineBreak
|
||||
} else {
|
||||
n.flags = n.flags &^ textSoftLineBreak
|
||||
}
|
||||
}
|
||||
|
||||
// IsRaw returns true if this text should be rendered without unescaping
|
||||
// backslash escapes and resolving references.
|
||||
func (n *Text) IsRaw() bool {
|
||||
return n.flags&textRaw != 0
|
||||
}
|
||||
|
||||
// SetRaw sets whether this text should be rendered as raw contents.
|
||||
func (n *Text) SetRaw(v bool) {
|
||||
if v {
|
||||
n.flags |= textRaw
|
||||
} else {
|
||||
n.flags = n.flags &^ textRaw
|
||||
}
|
||||
}
|
||||
|
||||
// HardLineBreak returns true if this node ends with a hard line break.
|
||||
// See https://spec.commonmark.org/0.30/#hard-line-breaks for details.
|
||||
func (n *Text) HardLineBreak() bool {
|
||||
return n.flags&textHardLineBreak != 0
|
||||
}
|
||||
|
||||
// SetHardLineBreak sets whether this node ends with a hard line break.
|
||||
func (n *Text) SetHardLineBreak(v bool) {
|
||||
if v {
|
||||
n.flags |= textHardLineBreak
|
||||
} else {
|
||||
n.flags = n.flags &^ textHardLineBreak
|
||||
}
|
||||
}
|
||||
|
||||
// Merge merges the given node into this node.
|
||||
// Merge returns true if the given node has been merged, otherwise false.
|
||||
func (n *Text) Merge(node Node, source []byte) bool {
|
||||
t, ok := node.(*Text)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
if n.Segment.Stop != t.Segment.Start || t.Segment.Padding != 0 || source[n.Segment.Stop-1] == '\n' || t.IsRaw() != n.IsRaw() {
|
||||
return false
|
||||
}
|
||||
n.Segment.Stop = t.Segment.Stop
|
||||
n.SetSoftLineBreak(t.SoftLineBreak())
|
||||
n.SetHardLineBreak(t.HardLineBreak())
|
||||
return true
|
||||
}
|
||||
|
||||
// Text implements Node.Text.
|
||||
func (n *Text) Text(source []byte) []byte {
|
||||
return n.Segment.Value(source)
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *Text) Dump(source []byte, level int) {
|
||||
fs := textFlagsString(n.flags)
|
||||
if len(fs) != 0 {
|
||||
fs = "(" + fs + ")"
|
||||
}
|
||||
fmt.Printf("%sText%s: \"%s\"\n", strings.Repeat(" ", level), fs, strings.TrimRight(string(n.Text(source)), "\n"))
|
||||
}
|
||||
|
||||
// KindText is a NodeKind of the Text node.
|
||||
var KindText = NewNodeKind("Text")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *Text) Kind() NodeKind {
|
||||
return KindText
|
||||
}
|
||||
|
||||
// NewText returns a new Text node.
|
||||
func NewText() *Text {
|
||||
return &Text{
|
||||
BaseInline: BaseInline{},
|
||||
}
|
||||
}
|
||||
|
||||
// NewTextSegment returns a new Text node with the given source position.
|
||||
func NewTextSegment(v textm.Segment) *Text {
|
||||
return &Text{
|
||||
BaseInline: BaseInline{},
|
||||
Segment: v,
|
||||
}
|
||||
}
|
||||
|
||||
// NewRawTextSegment returns a new Text node with the given source position.
|
||||
// The new node should be rendered as raw contents.
|
||||
func NewRawTextSegment(v textm.Segment) *Text {
|
||||
t := &Text{
|
||||
BaseInline: BaseInline{},
|
||||
Segment: v,
|
||||
}
|
||||
t.SetRaw(true)
|
||||
return t
|
||||
}
|
||||
|
||||
// MergeOrAppendTextSegment merges a given s into the last child of the parent if
|
||||
// it can be merged, otherwise creates a new Text node and appends it after the current
|
||||
// last child.
|
||||
func MergeOrAppendTextSegment(parent Node, s textm.Segment) {
|
||||
last := parent.LastChild()
|
||||
t, ok := last.(*Text)
|
||||
if ok && t.Segment.Stop == s.Start && !t.SoftLineBreak() {
|
||||
t.Segment = t.Segment.WithStop(s.Stop)
|
||||
} else {
|
||||
parent.AppendChild(parent, NewTextSegment(s))
|
||||
}
|
||||
}
|
||||
|
||||
// MergeOrReplaceTextSegment merges a given s into a previous sibling of the node n
|
||||
// if a previous sibling of the node n is *Text, otherwise replaces Node n with s.
|
||||
func MergeOrReplaceTextSegment(parent Node, n Node, s textm.Segment) {
|
||||
prev := n.PreviousSibling()
|
||||
if t, ok := prev.(*Text); ok && t.Segment.Stop == s.Start && !t.SoftLineBreak() {
|
||||
t.Segment = t.Segment.WithStop(s.Stop)
|
||||
parent.RemoveChild(parent, n)
|
||||
} else {
|
||||
parent.ReplaceChild(parent, n, NewTextSegment(s))
|
||||
}
|
||||
}
|
||||
|
||||
// A String struct is textual content that has a concrete value.
|
||||
type String struct {
|
||||
BaseInline
|
||||
|
||||
Value []byte
|
||||
flags uint8
|
||||
}
|
||||
|
||||
// Inline implements Inline.Inline.
|
||||
func (n *String) Inline() {
|
||||
}
|
||||
|
||||
// IsRaw returns true if this text should be rendered without unescaping
|
||||
// backslash escapes and resolving references.
|
||||
func (n *String) IsRaw() bool {
|
||||
return n.flags&textRaw != 0
|
||||
}
|
||||
|
||||
// SetRaw sets whether this text should be rendered as raw contents.
|
||||
func (n *String) SetRaw(v bool) {
|
||||
if v {
|
||||
n.flags |= textRaw
|
||||
} else {
|
||||
n.flags = n.flags &^ textRaw
|
||||
}
|
||||
}
|
||||
|
||||
// IsCode returns true if this text should be rendered without any
|
||||
// modifications.
|
||||
func (n *String) IsCode() bool {
|
||||
return n.flags&textCode != 0
|
||||
}
|
||||
|
||||
// SetCode sets whether this text should be rendered without any modifications.
|
||||
func (n *String) SetCode(v bool) {
|
||||
if v {
|
||||
n.flags |= textCode
|
||||
} else {
|
||||
n.flags = n.flags &^ textCode
|
||||
}
|
||||
}
|
||||
|
||||
// Text implements Node.Text.
|
||||
func (n *String) Text(source []byte) []byte {
|
||||
return n.Value
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *String) Dump(source []byte, level int) {
|
||||
fs := textFlagsString(n.flags)
|
||||
if len(fs) != 0 {
|
||||
fs = "(" + fs + ")"
|
||||
}
|
||||
fmt.Printf("%sString%s: \"%s\"\n", strings.Repeat(" ", level), fs, strings.TrimRight(string(n.Value), "\n"))
|
||||
}
|
||||
|
||||
// KindString is a NodeKind of the String node.
|
||||
var KindString = NewNodeKind("String")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *String) Kind() NodeKind {
|
||||
return KindString
|
||||
}
|
||||
|
||||
// NewString returns a new String node.
|
||||
func NewString(v []byte) *String {
|
||||
return &String{
|
||||
Value: v,
|
||||
}
|
||||
}
|
||||
|
||||
// A CodeSpan struct represents a code span of Markdown text.
|
||||
type CodeSpan struct {
|
||||
BaseInline
|
||||
}
|
||||
|
||||
// Inline implements Inline.Inline .
|
||||
func (n *CodeSpan) Inline() {
|
||||
}
|
||||
|
||||
// IsBlank returns true if this node consists of spaces, otherwise false.
|
||||
func (n *CodeSpan) IsBlank(source []byte) bool {
|
||||
for c := n.FirstChild(); c != nil; c = c.NextSibling() {
|
||||
text := c.(*Text).Segment
|
||||
if !util.IsBlank(text.Value(source)) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump
|
||||
func (n *CodeSpan) Dump(source []byte, level int) {
|
||||
DumpHelper(n, source, level, nil, nil)
|
||||
}
|
||||
|
||||
// KindCodeSpan is a NodeKind of the CodeSpan node.
|
||||
var KindCodeSpan = NewNodeKind("CodeSpan")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *CodeSpan) Kind() NodeKind {
|
||||
return KindCodeSpan
|
||||
}
|
||||
|
||||
// NewCodeSpan returns a new CodeSpan node.
|
||||
func NewCodeSpan() *CodeSpan {
|
||||
return &CodeSpan{
|
||||
BaseInline: BaseInline{},
|
||||
}
|
||||
}
|
||||
|
||||
// An Emphasis struct represents an emphasis of Markdown text.
|
||||
type Emphasis struct {
|
||||
BaseInline
|
||||
|
||||
// Level is a level of the emphasis.
|
||||
Level int
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *Emphasis) Dump(source []byte, level int) {
|
||||
m := map[string]string{
|
||||
"Level": fmt.Sprintf("%v", n.Level),
|
||||
}
|
||||
DumpHelper(n, source, level, m, nil)
|
||||
}
|
||||
|
||||
// KindEmphasis is a NodeKind of the Emphasis node.
|
||||
var KindEmphasis = NewNodeKind("Emphasis")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *Emphasis) Kind() NodeKind {
|
||||
return KindEmphasis
|
||||
}
|
||||
|
||||
// NewEmphasis returns a new Emphasis node with the given level.
|
||||
func NewEmphasis(level int) *Emphasis {
|
||||
return &Emphasis{
|
||||
BaseInline: BaseInline{},
|
||||
Level: level,
|
||||
}
|
||||
}
|
||||
|
||||
type baseLink struct {
|
||||
BaseInline
|
||||
|
||||
// Destination is a destination(URL) of this link.
|
||||
Destination []byte
|
||||
|
||||
// Title is a title of this link.
|
||||
Title []byte
|
||||
}
|
||||
|
||||
// Inline implements Inline.Inline.
|
||||
func (n *baseLink) Inline() {
|
||||
}
|
||||
|
||||
// A Link struct represents a link of the Markdown text.
|
||||
type Link struct {
|
||||
baseLink
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *Link) Dump(source []byte, level int) {
|
||||
m := map[string]string{}
|
||||
m["Destination"] = string(n.Destination)
|
||||
m["Title"] = string(n.Title)
|
||||
DumpHelper(n, source, level, m, nil)
|
||||
}
|
||||
|
||||
// KindLink is a NodeKind of the Link node.
|
||||
var KindLink = NewNodeKind("Link")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *Link) Kind() NodeKind {
|
||||
return KindLink
|
||||
}
|
||||
|
||||
// NewLink returns a new Link node.
|
||||
func NewLink() *Link {
|
||||
c := &Link{
|
||||
baseLink: baseLink{
|
||||
BaseInline: BaseInline{},
|
||||
},
|
||||
}
|
||||
return c
|
||||
}
|
||||
|
||||
// An Image struct represents an image of the Markdown text.
|
||||
type Image struct {
|
||||
baseLink
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *Image) Dump(source []byte, level int) {
|
||||
m := map[string]string{}
|
||||
m["Destination"] = string(n.Destination)
|
||||
m["Title"] = string(n.Title)
|
||||
DumpHelper(n, source, level, m, nil)
|
||||
}
|
||||
|
||||
// KindImage is a NodeKind of the Image node.
|
||||
var KindImage = NewNodeKind("Image")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *Image) Kind() NodeKind {
|
||||
return KindImage
|
||||
}
|
||||
|
||||
// NewImage returns a new Image node.
|
||||
func NewImage(link *Link) *Image {
|
||||
c := &Image{
|
||||
baseLink: baseLink{
|
||||
BaseInline: BaseInline{},
|
||||
},
|
||||
}
|
||||
c.Destination = link.Destination
|
||||
c.Title = link.Title
|
||||
for n := link.FirstChild(); n != nil; {
|
||||
next := n.NextSibling()
|
||||
link.RemoveChild(link, n)
|
||||
c.AppendChild(c, n)
|
||||
n = next
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
// AutoLinkType defines kind of auto links.
|
||||
type AutoLinkType int
|
||||
|
||||
const (
|
||||
// AutoLinkEmail indicates that an autolink is an email address.
|
||||
AutoLinkEmail AutoLinkType = iota + 1
|
||||
// AutoLinkURL indicates that an autolink is a generic URL.
|
||||
AutoLinkURL
|
||||
)
|
||||
|
||||
// An AutoLink struct represents an autolink of the Markdown text.
|
||||
type AutoLink struct {
|
||||
BaseInline
|
||||
// Type is a type of this autolink.
|
||||
AutoLinkType AutoLinkType
|
||||
|
||||
// Protocol specifies the protocol of the link.
|
||||
Protocol []byte
|
||||
|
||||
value *Text
|
||||
}
|
||||
|
||||
// Inline implements Inline.Inline.
|
||||
func (n *AutoLink) Inline() {}
|
||||
|
||||
// Dump implements Node.Dump
|
||||
func (n *AutoLink) Dump(source []byte, level int) {
|
||||
segment := n.value.Segment
|
||||
m := map[string]string{
|
||||
"Value": string(segment.Value(source)),
|
||||
}
|
||||
DumpHelper(n, source, level, m, nil)
|
||||
}
|
||||
|
||||
// KindAutoLink is a NodeKind of the AutoLink node.
|
||||
var KindAutoLink = NewNodeKind("AutoLink")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *AutoLink) Kind() NodeKind {
|
||||
return KindAutoLink
|
||||
}
|
||||
|
||||
// URL returns the URL of this node.
|
||||
func (n *AutoLink) URL(source []byte) []byte {
|
||||
if n.Protocol != nil {
|
||||
s := n.value.Segment
|
||||
ret := make([]byte, 0, len(n.Protocol)+s.Len()+3)
|
||||
ret = append(ret, n.Protocol...)
|
||||
ret = append(ret, ':', '/', '/')
|
||||
ret = append(ret, n.value.Text(source)...)
|
||||
return ret
|
||||
}
|
||||
return n.value.Text(source)
|
||||
}
|
||||
|
||||
// Label returns a label of this node.
|
||||
func (n *AutoLink) Label(source []byte) []byte {
|
||||
return n.value.Text(source)
|
||||
}
|
||||
|
||||
// NewAutoLink returns a new AutoLink node.
|
||||
func NewAutoLink(typ AutoLinkType, value *Text) *AutoLink {
|
||||
return &AutoLink{
|
||||
BaseInline: BaseInline{},
|
||||
value: value,
|
||||
AutoLinkType: typ,
|
||||
}
|
||||
}
|
||||
|
||||
// A RawHTML struct represents an inline raw HTML of the Markdown text.
|
||||
type RawHTML struct {
|
||||
BaseInline
|
||||
Segments *textm.Segments
|
||||
}
|
||||
|
||||
// Inline implements Inline.Inline.
|
||||
func (n *RawHTML) Inline() {}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *RawHTML) Dump(source []byte, level int) {
|
||||
m := map[string]string{}
|
||||
t := []string{}
|
||||
for i := 0; i < n.Segments.Len(); i++ {
|
||||
segment := n.Segments.At(i)
|
||||
t = append(t, string(segment.Value(source)))
|
||||
}
|
||||
m["RawText"] = strings.Join(t, "")
|
||||
DumpHelper(n, source, level, m, nil)
|
||||
}
|
||||
|
||||
// KindRawHTML is a NodeKind of the RawHTML node.
|
||||
var KindRawHTML = NewNodeKind("RawHTML")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *RawHTML) Kind() NodeKind {
|
||||
return KindRawHTML
|
||||
}
|
||||
|
||||
// NewRawHTML returns a new RawHTML node.
|
||||
func NewRawHTML() *RawHTML {
|
||||
return &RawHTML{
|
||||
Segments: textm.NewSegments(),
|
||||
}
|
||||
}
|
|
@ -0,0 +1,83 @@
|
|||
package ast
|
||||
|
||||
import (
|
||||
gast "github.com/yuin/goldmark/ast"
|
||||
)
|
||||
|
||||
// A DefinitionList struct represents a definition list of Markdown
|
||||
// (PHP Markdown Extra) text.
|
||||
type DefinitionList struct {
|
||||
gast.BaseBlock
|
||||
Offset int
|
||||
TemporaryParagraph *gast.Paragraph
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *DefinitionList) Dump(source []byte, level int) {
|
||||
gast.DumpHelper(n, source, level, nil, nil)
|
||||
}
|
||||
|
||||
// KindDefinitionList is a NodeKind of the DefinitionList node.
|
||||
var KindDefinitionList = gast.NewNodeKind("DefinitionList")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *DefinitionList) Kind() gast.NodeKind {
|
||||
return KindDefinitionList
|
||||
}
|
||||
|
||||
// NewDefinitionList returns a new DefinitionList node.
|
||||
func NewDefinitionList(offset int, para *gast.Paragraph) *DefinitionList {
|
||||
return &DefinitionList{
|
||||
Offset: offset,
|
||||
TemporaryParagraph: para,
|
||||
}
|
||||
}
|
||||
|
||||
// A DefinitionTerm struct represents a definition list term of Markdown
|
||||
// (PHP Markdown Extra) text.
|
||||
type DefinitionTerm struct {
|
||||
gast.BaseBlock
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *DefinitionTerm) Dump(source []byte, level int) {
|
||||
gast.DumpHelper(n, source, level, nil, nil)
|
||||
}
|
||||
|
||||
// KindDefinitionTerm is a NodeKind of the DefinitionTerm node.
|
||||
var KindDefinitionTerm = gast.NewNodeKind("DefinitionTerm")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *DefinitionTerm) Kind() gast.NodeKind {
|
||||
return KindDefinitionTerm
|
||||
}
|
||||
|
||||
// NewDefinitionTerm returns a new DefinitionTerm node.
|
||||
func NewDefinitionTerm() *DefinitionTerm {
|
||||
return &DefinitionTerm{}
|
||||
}
|
||||
|
||||
// A DefinitionDescription struct represents a definition list description of Markdown
|
||||
// (PHP Markdown Extra) text.
|
||||
type DefinitionDescription struct {
|
||||
gast.BaseBlock
|
||||
IsTight bool
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *DefinitionDescription) Dump(source []byte, level int) {
|
||||
gast.DumpHelper(n, source, level, nil, nil)
|
||||
}
|
||||
|
||||
// KindDefinitionDescription is a NodeKind of the DefinitionDescription node.
|
||||
var KindDefinitionDescription = gast.NewNodeKind("DefinitionDescription")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *DefinitionDescription) Kind() gast.NodeKind {
|
||||
return KindDefinitionDescription
|
||||
}
|
||||
|
||||
// NewDefinitionDescription returns a new DefinitionDescription node.
|
||||
func NewDefinitionDescription() *DefinitionDescription {
|
||||
return &DefinitionDescription{}
|
||||
}
|
|
@ -0,0 +1,138 @@
|
|||
package ast
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
gast "github.com/yuin/goldmark/ast"
|
||||
)
|
||||
|
||||
// A FootnoteLink struct represents a link to a footnote of Markdown
|
||||
// (PHP Markdown Extra) text.
|
||||
type FootnoteLink struct {
|
||||
gast.BaseInline
|
||||
Index int
|
||||
RefCount int
|
||||
RefIndex int
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *FootnoteLink) Dump(source []byte, level int) {
|
||||
m := map[string]string{}
|
||||
m["Index"] = fmt.Sprintf("%v", n.Index)
|
||||
m["RefCount"] = fmt.Sprintf("%v", n.RefCount)
|
||||
m["RefIndex"] = fmt.Sprintf("%v", n.RefIndex)
|
||||
gast.DumpHelper(n, source, level, m, nil)
|
||||
}
|
||||
|
||||
// KindFootnoteLink is a NodeKind of the FootnoteLink node.
|
||||
var KindFootnoteLink = gast.NewNodeKind("FootnoteLink")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *FootnoteLink) Kind() gast.NodeKind {
|
||||
return KindFootnoteLink
|
||||
}
|
||||
|
||||
// NewFootnoteLink returns a new FootnoteLink node.
|
||||
func NewFootnoteLink(index int) *FootnoteLink {
|
||||
return &FootnoteLink{
|
||||
Index: index,
|
||||
RefCount: 0,
|
||||
RefIndex: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// A FootnoteBacklink struct represents a backlink to a footnote reference of Markdown
|
||||
// (PHP Markdown Extra) text.
|
||||
type FootnoteBacklink struct {
|
||||
gast.BaseInline
|
||||
Index int
|
||||
RefCount int
|
||||
RefIndex int
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *FootnoteBacklink) Dump(source []byte, level int) {
|
||||
m := map[string]string{}
|
||||
m["Index"] = fmt.Sprintf("%v", n.Index)
|
||||
m["RefCount"] = fmt.Sprintf("%v", n.RefCount)
|
||||
m["RefIndex"] = fmt.Sprintf("%v", n.RefIndex)
|
||||
gast.DumpHelper(n, source, level, m, nil)
|
||||
}
|
||||
|
||||
// KindFootnoteBacklink is a NodeKind of the FootnoteBacklink node.
|
||||
var KindFootnoteBacklink = gast.NewNodeKind("FootnoteBacklink")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *FootnoteBacklink) Kind() gast.NodeKind {
|
||||
return KindFootnoteBacklink
|
||||
}
|
||||
|
||||
// NewFootnoteBacklink returns a new FootnoteBacklink node.
|
||||
func NewFootnoteBacklink(index int) *FootnoteBacklink {
|
||||
return &FootnoteBacklink{
|
||||
Index: index,
|
||||
RefCount: 0,
|
||||
RefIndex: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// A Footnote struct represents a footnote of Markdown
|
||||
// (PHP Markdown Extra) text.
|
||||
type Footnote struct {
|
||||
gast.BaseBlock
|
||||
Ref []byte
|
||||
Index int
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *Footnote) Dump(source []byte, level int) {
|
||||
m := map[string]string{}
|
||||
m["Index"] = fmt.Sprintf("%v", n.Index)
|
||||
m["Ref"] = fmt.Sprintf("%s", n.Ref)
|
||||
gast.DumpHelper(n, source, level, m, nil)
|
||||
}
|
||||
|
||||
// KindFootnote is a NodeKind of the Footnote node.
|
||||
var KindFootnote = gast.NewNodeKind("Footnote")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *Footnote) Kind() gast.NodeKind {
|
||||
return KindFootnote
|
||||
}
|
||||
|
||||
// NewFootnote returns a new Footnote node.
|
||||
func NewFootnote(ref []byte) *Footnote {
|
||||
return &Footnote{
|
||||
Ref: ref,
|
||||
Index: -1,
|
||||
}
|
||||
}
|
||||
|
||||
// A FootnoteList struct represents footnotes of Markdown
|
||||
// (PHP Markdown Extra) text.
|
||||
type FootnoteList struct {
|
||||
gast.BaseBlock
|
||||
Count int
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *FootnoteList) Dump(source []byte, level int) {
|
||||
m := map[string]string{}
|
||||
m["Count"] = fmt.Sprintf("%v", n.Count)
|
||||
gast.DumpHelper(n, source, level, m, nil)
|
||||
}
|
||||
|
||||
// KindFootnoteList is a NodeKind of the FootnoteList node.
|
||||
var KindFootnoteList = gast.NewNodeKind("FootnoteList")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *FootnoteList) Kind() gast.NodeKind {
|
||||
return KindFootnoteList
|
||||
}
|
||||
|
||||
// NewFootnoteList returns a new FootnoteList node.
|
||||
func NewFootnoteList() *FootnoteList {
|
||||
return &FootnoteList{
|
||||
Count: 0,
|
||||
}
|
||||
}
|
|
@ -0,0 +1,29 @@
|
|||
// Package ast defines AST nodes that represent extension elements.
|
||||
package ast
|
||||
|
||||
import (
|
||||
gast "github.com/yuin/goldmark/ast"
|
||||
)
|
||||
|
||||
// A Strikethrough struct represents a strikethrough of GFM text.
|
||||
type Strikethrough struct {
|
||||
gast.BaseInline
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *Strikethrough) Dump(source []byte, level int) {
|
||||
gast.DumpHelper(n, source, level, nil, nil)
|
||||
}
|
||||
|
||||
// KindStrikethrough is a NodeKind of the Strikethrough node.
|
||||
var KindStrikethrough = gast.NewNodeKind("Strikethrough")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *Strikethrough) Kind() gast.NodeKind {
|
||||
return KindStrikethrough
|
||||
}
|
||||
|
||||
// NewStrikethrough returns a new Strikethrough node.
|
||||
func NewStrikethrough() *Strikethrough {
|
||||
return &Strikethrough{}
|
||||
}
|
|
@ -0,0 +1,157 @@
|
|||
package ast
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
gast "github.com/yuin/goldmark/ast"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Alignment is a text alignment of table cells.
|
||||
type Alignment int
|
||||
|
||||
const (
|
||||
// AlignLeft indicates text should be left justified.
|
||||
AlignLeft Alignment = iota + 1
|
||||
|
||||
// AlignRight indicates text should be right justified.
|
||||
AlignRight
|
||||
|
||||
// AlignCenter indicates text should be centered.
|
||||
AlignCenter
|
||||
|
||||
// AlignNone indicates text should be aligned in the default manner.
|
||||
AlignNone
|
||||
)
|
||||
|
||||
func (a Alignment) String() string {
|
||||
switch a {
|
||||
case AlignLeft:
|
||||
return "left"
|
||||
case AlignRight:
|
||||
return "right"
|
||||
case AlignCenter:
|
||||
return "center"
|
||||
case AlignNone:
|
||||
return "none"
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// A Table struct represents a table of Markdown(GFM) text.
|
||||
type Table struct {
|
||||
gast.BaseBlock
|
||||
|
||||
// Alignments returns alignments of the columns.
|
||||
Alignments []Alignment
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump
|
||||
func (n *Table) Dump(source []byte, level int) {
|
||||
gast.DumpHelper(n, source, level, nil, func(level int) {
|
||||
indent := strings.Repeat(" ", level)
|
||||
fmt.Printf("%sAlignments {\n", indent)
|
||||
for i, alignment := range n.Alignments {
|
||||
indent2 := strings.Repeat(" ", level+1)
|
||||
fmt.Printf("%s%s", indent2, alignment.String())
|
||||
if i != len(n.Alignments)-1 {
|
||||
fmt.Println("")
|
||||
}
|
||||
}
|
||||
fmt.Printf("\n%s}\n", indent)
|
||||
})
|
||||
}
|
||||
|
||||
// KindTable is a NodeKind of the Table node.
|
||||
var KindTable = gast.NewNodeKind("Table")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *Table) Kind() gast.NodeKind {
|
||||
return KindTable
|
||||
}
|
||||
|
||||
// NewTable returns a new Table node.
|
||||
func NewTable() *Table {
|
||||
return &Table{
|
||||
Alignments: []Alignment{},
|
||||
}
|
||||
}
|
||||
|
||||
// A TableRow struct represents a table row of Markdown(GFM) text.
|
||||
type TableRow struct {
|
||||
gast.BaseBlock
|
||||
Alignments []Alignment
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *TableRow) Dump(source []byte, level int) {
|
||||
gast.DumpHelper(n, source, level, nil, nil)
|
||||
}
|
||||
|
||||
// KindTableRow is a NodeKind of the TableRow node.
|
||||
var KindTableRow = gast.NewNodeKind("TableRow")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *TableRow) Kind() gast.NodeKind {
|
||||
return KindTableRow
|
||||
}
|
||||
|
||||
// NewTableRow returns a new TableRow node.
|
||||
func NewTableRow(alignments []Alignment) *TableRow {
|
||||
return &TableRow{}
|
||||
}
|
||||
|
||||
// A TableHeader struct represents a table header of Markdown(GFM) text.
|
||||
type TableHeader struct {
|
||||
gast.BaseBlock
|
||||
Alignments []Alignment
|
||||
}
|
||||
|
||||
// KindTableHeader is a NodeKind of the TableHeader node.
|
||||
var KindTableHeader = gast.NewNodeKind("TableHeader")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *TableHeader) Kind() gast.NodeKind {
|
||||
return KindTableHeader
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *TableHeader) Dump(source []byte, level int) {
|
||||
gast.DumpHelper(n, source, level, nil, nil)
|
||||
}
|
||||
|
||||
// NewTableHeader returns a new TableHeader node.
|
||||
func NewTableHeader(row *TableRow) *TableHeader {
|
||||
n := &TableHeader{}
|
||||
for c := row.FirstChild(); c != nil; {
|
||||
next := c.NextSibling()
|
||||
n.AppendChild(n, c)
|
||||
c = next
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
// A TableCell struct represents a table cell of a Markdown(GFM) text.
|
||||
type TableCell struct {
|
||||
gast.BaseBlock
|
||||
Alignment Alignment
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *TableCell) Dump(source []byte, level int) {
|
||||
gast.DumpHelper(n, source, level, nil, nil)
|
||||
}
|
||||
|
||||
// KindTableCell is a NodeKind of the TableCell node.
|
||||
var KindTableCell = gast.NewNodeKind("TableCell")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *TableCell) Kind() gast.NodeKind {
|
||||
return KindTableCell
|
||||
}
|
||||
|
||||
// NewTableCell returns a new TableCell node.
|
||||
func NewTableCell() *TableCell {
|
||||
return &TableCell{
|
||||
Alignment: AlignNone,
|
||||
}
|
||||
}
|
|
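The node types above are the AST side of the table extension vendored later in this commit. As a minimal, hedged sketch (the sample input and variable names are illustrative and not part of this commit), a caller could parse with the Table extender and read the Alignments slice like this:

package main

import (
	"fmt"

	"github.com/yuin/goldmark"
	gast "github.com/yuin/goldmark/ast"
	"github.com/yuin/goldmark/extension"
	extast "github.com/yuin/goldmark/extension/ast"
	"github.com/yuin/goldmark/text"
)

func main() {
	// Illustrative input: a two-column GFM table, left- and right-aligned.
	src := []byte("| a | b |\n|:--|--:|\n| 1 | 2 |\n")
	md := goldmark.New(goldmark.WithExtensions(extension.Table))
	// Parse only, so the extension AST nodes defined above can be inspected.
	doc := md.Parser().Parse(text.NewReader(src))
	_ = gast.Walk(doc, func(n gast.Node, entering bool) (gast.WalkStatus, error) {
		if entering && n.Kind() == extast.KindTable {
			// Alignments holds one Alignment per column; its Stringer prints "left", "right", ...
			fmt.Println(n.(*extast.Table).Alignments)
		}
		return gast.WalkContinue, nil
	})
}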
@ -0,0 +1,35 @@
|
|||
package ast
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
gast "github.com/yuin/goldmark/ast"
|
||||
)
|
||||
|
||||
// A TaskCheckBox struct represents a checkbox of a task list.
|
||||
type TaskCheckBox struct {
|
||||
gast.BaseInline
|
||||
IsChecked bool
|
||||
}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *TaskCheckBox) Dump(source []byte, level int) {
|
||||
m := map[string]string{
|
||||
"Checked": fmt.Sprintf("%v", n.IsChecked),
|
||||
}
|
||||
gast.DumpHelper(n, source, level, m, nil)
|
||||
}
|
||||
|
||||
// KindTaskCheckBox is a NodeKind of the TaskCheckBox node.
|
||||
var KindTaskCheckBox = gast.NewNodeKind("TaskCheckBox")
|
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *TaskCheckBox) Kind() gast.NodeKind {
|
||||
return KindTaskCheckBox
|
||||
}
|
||||
|
||||
// NewTaskCheckBox returns a new TaskCheckBox node.
|
||||
func NewTaskCheckBox(checked bool) *TaskCheckBox {
|
||||
return &TaskCheckBox{
|
||||
IsChecked: checked,
|
||||
}
|
||||
}
|
|
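TaskCheckBox is the inline node produced by the TaskList extension that GFM wires up further down in this commit. A minimal, hedged sketch of inspecting it after parsing; the sample input and names are illustrative only:

package main

import (
	"fmt"

	"github.com/yuin/goldmark"
	gast "github.com/yuin/goldmark/ast"
	"github.com/yuin/goldmark/extension"
	extast "github.com/yuin/goldmark/extension/ast"
	"github.com/yuin/goldmark/text"
)

func main() {
	src := []byte("- [x] write Dockerfile\n- [ ] write docs\n")
	md := goldmark.New(goldmark.WithExtensions(extension.TaskList))
	doc := md.Parser().Parse(text.NewReader(src))
	done := 0
	_ = gast.Walk(doc, func(n gast.Node, entering bool) (gast.WalkStatus, error) {
		if entering && n.Kind() == extast.KindTaskCheckBox && n.(*extast.TaskCheckBox).IsChecked {
			done++ // count boxes that will render as checked inputs
		}
		return gast.WalkContinue, nil
	})
	fmt.Println(done, "of 2 tasks done")
}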
@ -0,0 +1,270 @@
|
|||
package extension
|
||||
|
||||
import (
|
||||
"github.com/yuin/goldmark"
|
||||
gast "github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/extension/ast"
|
||||
"github.com/yuin/goldmark/parser"
|
||||
"github.com/yuin/goldmark/renderer"
|
||||
"github.com/yuin/goldmark/renderer/html"
|
||||
"github.com/yuin/goldmark/text"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
type definitionListParser struct {
|
||||
}
|
||||
|
||||
var defaultDefinitionListParser = &definitionListParser{}
|
||||
|
||||
// NewDefinitionListParser returns a new parser.BlockParser that
|
||||
// can parse PHP Markdown Extra Definition lists.
|
||||
func NewDefinitionListParser() parser.BlockParser {
|
||||
return defaultDefinitionListParser
|
||||
}
|
||||
|
||||
func (b *definitionListParser) Trigger() []byte {
|
||||
return []byte{':'}
|
||||
}
|
||||
|
||||
func (b *definitionListParser) Open(parent gast.Node, reader text.Reader, pc parser.Context) (gast.Node, parser.State) {
|
||||
if _, ok := parent.(*ast.DefinitionList); ok {
|
||||
return nil, parser.NoChildren
|
||||
}
|
||||
line, _ := reader.PeekLine()
|
||||
pos := pc.BlockOffset()
|
||||
indent := pc.BlockIndent()
|
||||
if pos < 0 || line[pos] != ':' || indent != 0 {
|
||||
return nil, parser.NoChildren
|
||||
}
|
||||
|
||||
last := parent.LastChild()
|
||||
// need 1 or more spaces after ':'
|
||||
w, _ := util.IndentWidth(line[pos+1:], pos+1)
|
||||
if w < 1 {
|
||||
return nil, parser.NoChildren
|
||||
}
|
||||
if w >= 8 { // starts with indented code
|
||||
w = 5
|
||||
}
|
||||
w += pos + 1 /* 1 = ':' */
|
||||
|
||||
para, lastIsParagraph := last.(*gast.Paragraph)
|
||||
var list *ast.DefinitionList
|
||||
status := parser.HasChildren
|
||||
var ok bool
|
||||
if lastIsParagraph {
|
||||
list, ok = last.PreviousSibling().(*ast.DefinitionList)
|
||||
if ok { // is not first item
|
||||
list.Offset = w
|
||||
list.TemporaryParagraph = para
|
||||
} else { // is first item
|
||||
list = ast.NewDefinitionList(w, para)
|
||||
status |= parser.RequireParagraph
|
||||
}
|
||||
} else if list, ok = last.(*ast.DefinitionList); ok { // multiple description
|
||||
list.Offset = w
|
||||
list.TemporaryParagraph = nil
|
||||
} else {
|
||||
return nil, parser.NoChildren
|
||||
}
|
||||
|
||||
return list, status
|
||||
}
|
||||
|
||||
func (b *definitionListParser) Continue(node gast.Node, reader text.Reader, pc parser.Context) parser.State {
|
||||
line, _ := reader.PeekLine()
|
||||
if util.IsBlank(line) {
|
||||
return parser.Continue | parser.HasChildren
|
||||
}
|
||||
list, _ := node.(*ast.DefinitionList)
|
||||
w, _ := util.IndentWidth(line, reader.LineOffset())
|
||||
if w < list.Offset {
|
||||
return parser.Close
|
||||
}
|
||||
pos, padding := util.IndentPosition(line, reader.LineOffset(), list.Offset)
|
||||
reader.AdvanceAndSetPadding(pos, padding)
|
||||
return parser.Continue | parser.HasChildren
|
||||
}
|
||||
|
||||
func (b *definitionListParser) Close(node gast.Node, reader text.Reader, pc parser.Context) {
|
||||
// nothing to do
|
||||
}
|
||||
|
||||
func (b *definitionListParser) CanInterruptParagraph() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (b *definitionListParser) CanAcceptIndentedLine() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
type definitionDescriptionParser struct {
|
||||
}
|
||||
|
||||
var defaultDefinitionDescriptionParser = &definitionDescriptionParser{}
|
||||
|
||||
// NewDefinitionDescriptionParser returns a new parser.BlockParser that
|
||||
// can parse a definition description that starts with ':'.
|
||||
func NewDefinitionDescriptionParser() parser.BlockParser {
|
||||
return defaultDefinitionDescriptionParser
|
||||
}
|
||||
|
||||
func (b *definitionDescriptionParser) Trigger() []byte {
|
||||
return []byte{':'}
|
||||
}
|
||||
|
||||
func (b *definitionDescriptionParser) Open(parent gast.Node, reader text.Reader, pc parser.Context) (gast.Node, parser.State) {
|
||||
line, _ := reader.PeekLine()
|
||||
pos := pc.BlockOffset()
|
||||
indent := pc.BlockIndent()
|
||||
if pos < 0 || line[pos] != ':' || indent != 0 {
|
||||
return nil, parser.NoChildren
|
||||
}
|
||||
list, _ := parent.(*ast.DefinitionList)
|
||||
if list == nil {
|
||||
return nil, parser.NoChildren
|
||||
}
|
||||
para := list.TemporaryParagraph
|
||||
list.TemporaryParagraph = nil
|
||||
if para != nil {
|
||||
lines := para.Lines()
|
||||
l := lines.Len()
|
||||
for i := 0; i < l; i++ {
|
||||
term := ast.NewDefinitionTerm()
|
||||
segment := lines.At(i)
|
||||
term.Lines().Append(segment.TrimRightSpace(reader.Source()))
|
||||
list.AppendChild(list, term)
|
||||
}
|
||||
para.Parent().RemoveChild(para.Parent(), para)
|
||||
}
|
||||
cpos, padding := util.IndentPosition(line[pos+1:], pos+1, list.Offset-pos-1)
|
||||
reader.AdvanceAndSetPadding(cpos+1, padding)
|
||||
|
||||
return ast.NewDefinitionDescription(), parser.HasChildren
|
||||
}
|
||||
|
||||
func (b *definitionDescriptionParser) Continue(node gast.Node, reader text.Reader, pc parser.Context) parser.State {
|
||||
// definitionListParser detects end of the description.
|
||||
// so this method will never be called.
|
||||
return parser.Continue | parser.HasChildren
|
||||
}
|
||||
|
||||
func (b *definitionDescriptionParser) Close(node gast.Node, reader text.Reader, pc parser.Context) {
|
||||
desc := node.(*ast.DefinitionDescription)
|
||||
desc.IsTight = !desc.HasBlankPreviousLines()
|
||||
if desc.IsTight {
|
||||
for gc := desc.FirstChild(); gc != nil; gc = gc.NextSibling() {
|
||||
paragraph, ok := gc.(*gast.Paragraph)
|
||||
if ok {
|
||||
textBlock := gast.NewTextBlock()
|
||||
textBlock.SetLines(paragraph.Lines())
|
||||
desc.ReplaceChild(desc, paragraph, textBlock)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (b *definitionDescriptionParser) CanInterruptParagraph() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (b *definitionDescriptionParser) CanAcceptIndentedLine() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
// DefinitionListHTMLRenderer is a renderer.NodeRenderer implementation that
|
||||
// renders DefinitionList nodes.
|
||||
type DefinitionListHTMLRenderer struct {
|
||||
html.Config
|
||||
}
|
||||
|
||||
// NewDefinitionListHTMLRenderer returns a new DefinitionListHTMLRenderer.
|
||||
func NewDefinitionListHTMLRenderer(opts ...html.Option) renderer.NodeRenderer {
|
||||
r := &DefinitionListHTMLRenderer{
|
||||
Config: html.NewConfig(),
|
||||
}
|
||||
for _, opt := range opts {
|
||||
opt.SetHTMLOption(&r.Config)
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// RegisterFuncs implements renderer.NodeRenderer.RegisterFuncs.
|
||||
func (r *DefinitionListHTMLRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
|
||||
reg.Register(ast.KindDefinitionList, r.renderDefinitionList)
|
||||
reg.Register(ast.KindDefinitionTerm, r.renderDefinitionTerm)
|
||||
reg.Register(ast.KindDefinitionDescription, r.renderDefinitionDescription)
|
||||
}
|
||||
|
||||
// DefinitionListAttributeFilter defines attribute names which dl elements can have.
|
||||
var DefinitionListAttributeFilter = html.GlobalAttributeFilter
|
||||
|
||||
func (r *DefinitionListHTMLRenderer) renderDefinitionList(w util.BufWriter, source []byte, n gast.Node, entering bool) (gast.WalkStatus, error) {
|
||||
if entering {
|
||||
if n.Attributes() != nil {
|
||||
_, _ = w.WriteString("<dl")
|
||||
html.RenderAttributes(w, n, DefinitionListAttributeFilter)
|
||||
_, _ = w.WriteString(">\n")
|
||||
} else {
|
||||
_, _ = w.WriteString("<dl>\n")
|
||||
}
|
||||
} else {
|
||||
_, _ = w.WriteString("</dl>\n")
|
||||
}
|
||||
return gast.WalkContinue, nil
|
||||
}
|
||||
|
||||
// DefinitionTermAttributeFilter defines attribute names which dt elements can have.
|
||||
var DefinitionTermAttributeFilter = html.GlobalAttributeFilter
|
||||
|
||||
func (r *DefinitionListHTMLRenderer) renderDefinitionTerm(w util.BufWriter, source []byte, n gast.Node, entering bool) (gast.WalkStatus, error) {
|
||||
if entering {
|
||||
if n.Attributes() != nil {
|
||||
_, _ = w.WriteString("<dt")
|
||||
html.RenderAttributes(w, n, DefinitionTermAttributeFilter)
|
||||
_ = w.WriteByte('>')
|
||||
} else {
|
||||
_, _ = w.WriteString("<dt>")
|
||||
}
|
||||
} else {
|
||||
_, _ = w.WriteString("</dt>\n")
|
||||
}
|
||||
return gast.WalkContinue, nil
|
||||
}
|
||||
|
||||
// DefinitionDescriptionAttributeFilter defines attribute names which dd elements can have.
|
||||
var DefinitionDescriptionAttributeFilter = html.GlobalAttributeFilter
|
||||
|
||||
func (r *DefinitionListHTMLRenderer) renderDefinitionDescription(w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
|
||||
if entering {
|
||||
n := node.(*ast.DefinitionDescription)
|
||||
_, _ = w.WriteString("<dd")
|
||||
if n.Attributes() != nil {
|
||||
html.RenderAttributes(w, n, DefinitionDescriptionAttributeFilter)
|
||||
}
|
||||
if n.IsTight {
|
||||
_, _ = w.WriteString(">")
|
||||
} else {
|
||||
_, _ = w.WriteString(">\n")
|
||||
}
|
||||
} else {
|
||||
_, _ = w.WriteString("</dd>\n")
|
||||
}
|
||||
return gast.WalkContinue, nil
|
||||
}
|
||||
|
||||
type definitionList struct {
|
||||
}
|
||||
|
||||
// DefinitionList is an extension that allows you to use PHP Markdown Extra Definition lists.
|
||||
var DefinitionList = &definitionList{}
|
||||
|
||||
func (e *definitionList) Extend(m goldmark.Markdown) {
|
||||
m.Parser().AddOptions(parser.WithBlockParsers(
|
||||
util.Prioritized(NewDefinitionListParser(), 101),
|
||||
util.Prioritized(NewDefinitionDescriptionParser(), 102),
|
||||
))
|
||||
m.Renderer().AddOptions(renderer.WithNodeRenderers(
|
||||
util.Prioritized(NewDefinitionListHTMLRenderer(), 500),
|
||||
))
|
||||
}
|
|
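The two block parsers and the renderer above cooperate: definitionListParser turns a trailing paragraph plus ':' lines into a DefinitionList, definitionDescriptionParser opens each description, and the renderer emits <dl>/<dt>/<dd>. A hedged usage sketch, with illustrative input text:

package main

import (
	"bytes"
	"fmt"

	"github.com/yuin/goldmark"
	"github.com/yuin/goldmark/extension"
)

func main() {
	src := []byte("Apple\n: A red fruit.\n: A tech company.\n")
	md := goldmark.New(goldmark.WithExtensions(extension.DefinitionList))
	var buf bytes.Buffer
	// The term paragraph plus ':' lines become <dl><dt>Apple</dt><dd>...</dd></dl>.
	if err := md.Convert(src, &buf); err != nil {
		panic(err)
	}
	fmt.Print(buf.String())
}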
@ -0,0 +1,687 @@
|
|||
package extension
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"strconv"
|
||||
|
||||
"github.com/yuin/goldmark"
|
||||
gast "github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/extension/ast"
|
||||
"github.com/yuin/goldmark/parser"
|
||||
"github.com/yuin/goldmark/renderer"
|
||||
"github.com/yuin/goldmark/renderer/html"
|
||||
"github.com/yuin/goldmark/text"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
var footnoteListKey = parser.NewContextKey()
|
||||
var footnoteLinkListKey = parser.NewContextKey()
|
||||
|
||||
type footnoteBlockParser struct {
|
||||
}
|
||||
|
||||
var defaultFootnoteBlockParser = &footnoteBlockParser{}
|
||||
|
||||
// NewFootnoteBlockParser returns a new parser.BlockParser that can parse
|
||||
// footnotes of the Markdown(PHP Markdown Extra) text.
|
||||
func NewFootnoteBlockParser() parser.BlockParser {
|
||||
return defaultFootnoteBlockParser
|
||||
}
|
||||
|
||||
func (b *footnoteBlockParser) Trigger() []byte {
|
||||
return []byte{'['}
|
||||
}
|
||||
|
||||
func (b *footnoteBlockParser) Open(parent gast.Node, reader text.Reader, pc parser.Context) (gast.Node, parser.State) {
|
||||
line, segment := reader.PeekLine()
|
||||
pos := pc.BlockOffset()
|
||||
if pos < 0 || line[pos] != '[' {
|
||||
return nil, parser.NoChildren
|
||||
}
|
||||
pos++
|
||||
if pos > len(line)-1 || line[pos] != '^' {
|
||||
return nil, parser.NoChildren
|
||||
}
|
||||
open := pos + 1
|
||||
closes := 0
|
||||
closure := util.FindClosure(line[pos+1:], '[', ']', false, false)
|
||||
closes = pos + 1 + closure
|
||||
next := closes + 1
|
||||
if closure > -1 {
|
||||
if next >= len(line) || line[next] != ':' {
|
||||
return nil, parser.NoChildren
|
||||
}
|
||||
} else {
|
||||
return nil, parser.NoChildren
|
||||
}
|
||||
padding := segment.Padding
|
||||
label := reader.Value(text.NewSegment(segment.Start+open-padding, segment.Start+closes-padding))
|
||||
if util.IsBlank(label) {
|
||||
return nil, parser.NoChildren
|
||||
}
|
||||
item := ast.NewFootnote(label)
|
||||
|
||||
pos = next + 1 - padding
|
||||
if pos >= len(line) {
|
||||
reader.Advance(pos)
|
||||
return item, parser.NoChildren
|
||||
}
|
||||
reader.AdvanceAndSetPadding(pos, padding)
|
||||
return item, parser.HasChildren
|
||||
}
|
||||
|
||||
func (b *footnoteBlockParser) Continue(node gast.Node, reader text.Reader, pc parser.Context) parser.State {
|
||||
line, _ := reader.PeekLine()
|
||||
if util.IsBlank(line) {
|
||||
return parser.Continue | parser.HasChildren
|
||||
}
|
||||
childpos, padding := util.IndentPosition(line, reader.LineOffset(), 4)
|
||||
if childpos < 0 {
|
||||
return parser.Close
|
||||
}
|
||||
reader.AdvanceAndSetPadding(childpos, padding)
|
||||
return parser.Continue | parser.HasChildren
|
||||
}
|
||||
|
||||
func (b *footnoteBlockParser) Close(node gast.Node, reader text.Reader, pc parser.Context) {
|
||||
var list *ast.FootnoteList
|
||||
if tlist := pc.Get(footnoteListKey); tlist != nil {
|
||||
list = tlist.(*ast.FootnoteList)
|
||||
} else {
|
||||
list = ast.NewFootnoteList()
|
||||
pc.Set(footnoteListKey, list)
|
||||
node.Parent().InsertBefore(node.Parent(), node, list)
|
||||
}
|
||||
node.Parent().RemoveChild(node.Parent(), node)
|
||||
list.AppendChild(list, node)
|
||||
}
|
||||
|
||||
func (b *footnoteBlockParser) CanInterruptParagraph() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (b *footnoteBlockParser) CanAcceptIndentedLine() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
type footnoteParser struct {
|
||||
}
|
||||
|
||||
var defaultFootnoteParser = &footnoteParser{}
|
||||
|
||||
// NewFootnoteParser returns a new parser.InlineParser that can parse
|
||||
// footnote links of the Markdown(PHP Markdown Extra) text.
|
||||
func NewFootnoteParser() parser.InlineParser {
|
||||
return defaultFootnoteParser
|
||||
}
|
||||
|
||||
func (s *footnoteParser) Trigger() []byte {
|
||||
// footnote syntax probably conflicts with the image syntax.
|
||||
// So we also need to trigger this parser with '!'.
|
||||
return []byte{'!', '['}
|
||||
}
|
||||
|
||||
func (s *footnoteParser) Parse(parent gast.Node, block text.Reader, pc parser.Context) gast.Node {
|
||||
line, segment := block.PeekLine()
|
||||
pos := 1
|
||||
if len(line) > 0 && line[0] == '!' {
|
||||
pos++
|
||||
}
|
||||
if pos >= len(line) || line[pos] != '^' {
|
||||
return nil
|
||||
}
|
||||
pos++
|
||||
if pos >= len(line) {
|
||||
return nil
|
||||
}
|
||||
open := pos
|
||||
closure := util.FindClosure(line[pos:], '[', ']', false, false)
|
||||
if closure < 0 {
|
||||
return nil
|
||||
}
|
||||
closes := pos + closure
|
||||
value := block.Value(text.NewSegment(segment.Start+open, segment.Start+closes))
|
||||
block.Advance(closes + 1)
|
||||
|
||||
var list *ast.FootnoteList
|
||||
if tlist := pc.Get(footnoteListKey); tlist != nil {
|
||||
list = tlist.(*ast.FootnoteList)
|
||||
}
|
||||
if list == nil {
|
||||
return nil
|
||||
}
|
||||
index := 0
|
||||
for def := list.FirstChild(); def != nil; def = def.NextSibling() {
|
||||
d := def.(*ast.Footnote)
|
||||
if bytes.Equal(d.Ref, value) {
|
||||
if d.Index < 0 {
|
||||
list.Count += 1
|
||||
d.Index = list.Count
|
||||
}
|
||||
index = d.Index
|
||||
break
|
||||
}
|
||||
}
|
||||
if index == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
fnlink := ast.NewFootnoteLink(index)
|
||||
var fnlist []*ast.FootnoteLink
|
||||
if tmp := pc.Get(footnoteLinkListKey); tmp != nil {
|
||||
fnlist = tmp.([]*ast.FootnoteLink)
|
||||
} else {
|
||||
fnlist = []*ast.FootnoteLink{}
|
||||
pc.Set(footnoteLinkListKey, fnlist)
|
||||
}
|
||||
pc.Set(footnoteLinkListKey, append(fnlist, fnlink))
|
||||
if line[0] == '!' {
|
||||
parent.AppendChild(parent, gast.NewTextSegment(text.NewSegment(segment.Start, segment.Start+1)))
|
||||
}
|
||||
|
||||
return fnlink
|
||||
}
|
||||
|
||||
type footnoteASTTransformer struct {
|
||||
}
|
||||
|
||||
var defaultFootnoteASTTransformer = &footnoteASTTransformer{}
|
||||
|
||||
// NewFootnoteASTTransformer returns a new parser.ASTTransformer that
|
||||
// inserts a footnote list at the end of the document.
|
||||
func NewFootnoteASTTransformer() parser.ASTTransformer {
|
||||
return defaultFootnoteASTTransformer
|
||||
}
|
||||
|
||||
func (a *footnoteASTTransformer) Transform(node *gast.Document, reader text.Reader, pc parser.Context) {
|
||||
var list *ast.FootnoteList
|
||||
var fnlist []*ast.FootnoteLink
|
||||
if tmp := pc.Get(footnoteListKey); tmp != nil {
|
||||
list = tmp.(*ast.FootnoteList)
|
||||
}
|
||||
if tmp := pc.Get(footnoteLinkListKey); tmp != nil {
|
||||
fnlist = tmp.([]*ast.FootnoteLink)
|
||||
}
|
||||
|
||||
pc.Set(footnoteListKey, nil)
|
||||
pc.Set(footnoteLinkListKey, nil)
|
||||
|
||||
if list == nil {
|
||||
return
|
||||
}
|
||||
|
||||
counter := map[int]int{}
|
||||
if fnlist != nil {
|
||||
for _, fnlink := range fnlist {
|
||||
if fnlink.Index >= 0 {
|
||||
counter[fnlink.Index]++
|
||||
}
|
||||
}
|
||||
refCounter := map[int]int{}
|
||||
for _, fnlink := range fnlist {
|
||||
fnlink.RefCount = counter[fnlink.Index]
|
||||
if _, ok := refCounter[fnlink.Index]; !ok {
|
||||
refCounter[fnlink.Index] = 0
|
||||
}
|
||||
fnlink.RefIndex = refCounter[fnlink.Index]
|
||||
refCounter[fnlink.Index]++
|
||||
}
|
||||
}
|
||||
for footnote := list.FirstChild(); footnote != nil; {
|
||||
var container gast.Node = footnote
|
||||
next := footnote.NextSibling()
|
||||
if fc := container.LastChild(); fc != nil && gast.IsParagraph(fc) {
|
||||
container = fc
|
||||
}
|
||||
fn := footnote.(*ast.Footnote)
|
||||
index := fn.Index
|
||||
if index < 0 {
|
||||
list.RemoveChild(list, footnote)
|
||||
} else {
|
||||
refCount := counter[index]
|
||||
backLink := ast.NewFootnoteBacklink(index)
|
||||
backLink.RefCount = refCount
|
||||
backLink.RefIndex = 0
|
||||
container.AppendChild(container, backLink)
|
||||
if refCount > 1 {
|
||||
for i := 1; i < refCount; i++ {
|
||||
backLink := ast.NewFootnoteBacklink(index)
|
||||
backLink.RefCount = refCount
|
||||
backLink.RefIndex = i
|
||||
container.AppendChild(container, backLink)
|
||||
}
|
||||
}
|
||||
}
|
||||
footnote = next
|
||||
}
|
||||
list.SortChildren(func(n1, n2 gast.Node) int {
|
||||
if n1.(*ast.Footnote).Index < n2.(*ast.Footnote).Index {
|
||||
return -1
|
||||
}
|
||||
return 1
|
||||
})
|
||||
if list.Count <= 0 {
|
||||
list.Parent().RemoveChild(list.Parent(), list)
|
||||
return
|
||||
}
|
||||
|
||||
node.AppendChild(node, list)
|
||||
}
|
||||
|
||||
// FootnoteConfig holds configuration values for the footnote extension.
|
||||
//
|
||||
// Link* and Backlink* configurations have some variables:
|
||||
// Occurrences of “^^” in the string will be replaced by the
|
||||
// corresponding footnote number in the HTML output.
|
||||
// Occurrences of “%%” will be replaced by a number for the
|
||||
// reference (footnotes can have multiple references).
|
||||
type FootnoteConfig struct {
|
||||
html.Config
|
||||
|
||||
// IDPrefix is a prefix for the id attributes generated by footnotes.
|
||||
IDPrefix []byte
|
||||
|
||||
// IDPrefixFunction is a function that determines the prefix of the id attributes for a given Node.
|
||||
IDPrefixFunction func(gast.Node) []byte
|
||||
|
||||
// LinkTitle is an optional title attribute for footnote links.
|
||||
LinkTitle []byte
|
||||
|
||||
// BacklinkTitle is an optional title attribute for footnote backlinks.
|
||||
BacklinkTitle []byte
|
||||
|
||||
// LinkClass is a class for footnote links.
|
||||
LinkClass []byte
|
||||
|
||||
// BacklinkClass is a class for footnote backlinks.
|
||||
BacklinkClass []byte
|
||||
|
||||
// BacklinkHTML is an HTML content for footnote backlinks.
|
||||
BacklinkHTML []byte
|
||||
}
|
||||
|
||||
// FootnoteOption interface is a functional option interface for the extension.
|
||||
type FootnoteOption interface {
|
||||
renderer.Option
|
||||
// SetFootnoteOption sets given option to the extension.
|
||||
SetFootnoteOption(*FootnoteConfig)
|
||||
}
|
||||
|
||||
// NewFootnoteConfig returns a new Config with defaults.
|
||||
func NewFootnoteConfig() FootnoteConfig {
|
||||
return FootnoteConfig{
|
||||
Config: html.NewConfig(),
|
||||
LinkTitle: []byte(""),
|
||||
BacklinkTitle: []byte(""),
|
||||
LinkClass: []byte("footnote-ref"),
|
||||
BacklinkClass: []byte("footnote-backref"),
|
||||
BacklinkHTML: []byte("↩︎"),
|
||||
}
|
||||
}
|
||||
|
||||
// SetOption implements renderer.SetOptioner.
|
||||
func (c *FootnoteConfig) SetOption(name renderer.OptionName, value interface{}) {
|
||||
switch name {
|
||||
case optFootnoteIDPrefixFunction:
|
||||
c.IDPrefixFunction = value.(func(gast.Node) []byte)
|
||||
case optFootnoteIDPrefix:
|
||||
c.IDPrefix = value.([]byte)
|
||||
case optFootnoteLinkTitle:
|
||||
c.LinkTitle = value.([]byte)
|
||||
case optFootnoteBacklinkTitle:
|
||||
c.BacklinkTitle = value.([]byte)
|
||||
case optFootnoteLinkClass:
|
||||
c.LinkClass = value.([]byte)
|
||||
case optFootnoteBacklinkClass:
|
||||
c.BacklinkClass = value.([]byte)
|
||||
case optFootnoteBacklinkHTML:
|
||||
c.BacklinkHTML = value.([]byte)
|
||||
default:
|
||||
c.Config.SetOption(name, value)
|
||||
}
|
||||
}
|
||||
|
||||
type withFootnoteHTMLOptions struct {
|
||||
value []html.Option
|
||||
}
|
||||
|
||||
func (o *withFootnoteHTMLOptions) SetConfig(c *renderer.Config) {
|
||||
if o.value != nil {
|
||||
for _, v := range o.value {
|
||||
v.(renderer.Option).SetConfig(c)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (o *withFootnoteHTMLOptions) SetFootnoteOption(c *FootnoteConfig) {
|
||||
if o.value != nil {
|
||||
for _, v := range o.value {
|
||||
v.SetHTMLOption(&c.Config)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// WithFootnoteHTMLOptions is a functional option that wraps goldmark HTMLRenderer options.
|
||||
func WithFootnoteHTMLOptions(opts ...html.Option) FootnoteOption {
|
||||
return &withFootnoteHTMLOptions{opts}
|
||||
}
|
||||
|
||||
const optFootnoteIDPrefix renderer.OptionName = "FootnoteIDPrefix"
|
||||
|
||||
type withFootnoteIDPrefix struct {
|
||||
value []byte
|
||||
}
|
||||
|
||||
func (o *withFootnoteIDPrefix) SetConfig(c *renderer.Config) {
|
||||
c.Options[optFootnoteIDPrefix] = o.value
|
||||
}
|
||||
|
||||
func (o *withFootnoteIDPrefix) SetFootnoteOption(c *FootnoteConfig) {
|
||||
c.IDPrefix = o.value
|
||||
}
|
||||
|
||||
// WithFootnoteIDPrefix is a functional option that is a prefix for the id attributes generated by footnotes.
|
||||
func WithFootnoteIDPrefix(a []byte) FootnoteOption {
|
||||
return &withFootnoteIDPrefix{a}
|
||||
}
|
||||
|
||||
const optFootnoteIDPrefixFunction renderer.OptionName = "FootnoteIDPrefixFunction"
|
||||
|
||||
type withFootnoteIDPrefixFunction struct {
|
||||
value func(gast.Node) []byte
|
||||
}
|
||||
|
||||
func (o *withFootnoteIDPrefixFunction) SetConfig(c *renderer.Config) {
|
||||
c.Options[optFootnoteIDPrefixFunction] = o.value
|
||||
}
|
||||
|
||||
func (o *withFootnoteIDPrefixFunction) SetFootnoteOption(c *FootnoteConfig) {
|
||||
c.IDPrefixFunction = o.value
|
||||
}
|
||||
|
||||
// WithFootnoteIDPrefixFunction is a functional option that sets a function determining a prefix for the id attributes generated by footnotes.
|
||||
func WithFootnoteIDPrefixFunction(a func(gast.Node) []byte) FootnoteOption {
|
||||
return &withFootnoteIDPrefixFunction{a}
|
||||
}
|
||||
|
||||
const optFootnoteLinkTitle renderer.OptionName = "FootnoteLinkTitle"
|
||||
|
||||
type withFootnoteLinkTitle struct {
|
||||
value []byte
|
||||
}
|
||||
|
||||
func (o *withFootnoteLinkTitle) SetConfig(c *renderer.Config) {
|
||||
c.Options[optFootnoteLinkTitle] = o.value
|
||||
}
|
||||
|
||||
func (o *withFootnoteLinkTitle) SetFootnoteOption(c *FootnoteConfig) {
|
||||
c.LinkTitle = o.value
|
||||
}
|
||||
|
||||
// WithFootnoteLinkTitle is a functional option that is an optional title attribute for footnote links.
|
||||
func WithFootnoteLinkTitle(a []byte) FootnoteOption {
|
||||
return &withFootnoteLinkTitle{a}
|
||||
}
|
||||
|
||||
const optFootnoteBacklinkTitle renderer.OptionName = "FootnoteBacklinkTitle"
|
||||
|
||||
type withFootnoteBacklinkTitle struct {
|
||||
value []byte
|
||||
}
|
||||
|
||||
func (o *withFootnoteBacklinkTitle) SetConfig(c *renderer.Config) {
|
||||
c.Options[optFootnoteBacklinkTitle] = o.value
|
||||
}
|
||||
|
||||
func (o *withFootnoteBacklinkTitle) SetFootnoteOption(c *FootnoteConfig) {
|
||||
c.BacklinkTitle = o.value
|
||||
}
|
||||
|
||||
// WithFootnoteBacklinkTitle is a functional option that is an optional title attribute for footnote backlinks.
|
||||
func WithFootnoteBacklinkTitle(a []byte) FootnoteOption {
|
||||
return &withFootnoteBacklinkTitle{a}
|
||||
}
|
||||
|
||||
const optFootnoteLinkClass renderer.OptionName = "FootnoteLinkClass"
|
||||
|
||||
type withFootnoteLinkClass struct {
|
||||
value []byte
|
||||
}
|
||||
|
||||
func (o *withFootnoteLinkClass) SetConfig(c *renderer.Config) {
|
||||
c.Options[optFootnoteLinkClass] = o.value
|
||||
}
|
||||
|
||||
func (o *withFootnoteLinkClass) SetFootnoteOption(c *FootnoteConfig) {
|
||||
c.LinkClass = o.value
|
||||
}
|
||||
|
||||
// WithFootnoteLinkClass is a functional option that is a class for footnote links.
|
||||
func WithFootnoteLinkClass(a []byte) FootnoteOption {
|
||||
return &withFootnoteLinkClass{a}
|
||||
}
|
||||
|
||||
const optFootnoteBacklinkClass renderer.OptionName = "FootnoteBacklinkClass"
|
||||
|
||||
type withFootnoteBacklinkClass struct {
|
||||
value []byte
|
||||
}
|
||||
|
||||
func (o *withFootnoteBacklinkClass) SetConfig(c *renderer.Config) {
|
||||
c.Options[optFootnoteBacklinkClass] = o.value
|
||||
}
|
||||
|
||||
func (o *withFootnoteBacklinkClass) SetFootnoteOption(c *FootnoteConfig) {
|
||||
c.BacklinkClass = o.value
|
||||
}
|
||||
|
||||
// WithFootnoteBacklinkClass is a functional option that is a class for footnote backlinks.
|
||||
func WithFootnoteBacklinkClass(a []byte) FootnoteOption {
|
||||
return &withFootnoteBacklinkClass{a}
|
||||
}
|
||||
|
||||
const optFootnoteBacklinkHTML renderer.OptionName = "FootnoteBacklinkHTML"
|
||||
|
||||
type withFootnoteBacklinkHTML struct {
|
||||
value []byte
|
||||
}
|
||||
|
||||
func (o *withFootnoteBacklinkHTML) SetConfig(c *renderer.Config) {
|
||||
c.Options[optFootnoteBacklinkHTML] = o.value
|
||||
}
|
||||
|
||||
func (o *withFootnoteBacklinkHTML) SetFootnoteOption(c *FootnoteConfig) {
|
||||
c.BacklinkHTML = o.value
|
||||
}
|
||||
|
||||
// WithFootnoteBacklinkHTML is a functional option that is an HTML content for footnote backlinks.
|
||||
func WithFootnoteBacklinkHTML(a []byte) FootnoteOption {
|
||||
return &withFootnoteBacklinkHTML{a}
|
||||
}
|
||||
|
||||
// FootnoteHTMLRenderer is a renderer.NodeRenderer implementation that
|
||||
// renders FootnoteLink nodes.
|
||||
type FootnoteHTMLRenderer struct {
|
||||
FootnoteConfig
|
||||
}
|
||||
|
||||
// NewFootnoteHTMLRenderer returns a new FootnoteHTMLRenderer.
|
||||
func NewFootnoteHTMLRenderer(opts ...FootnoteOption) renderer.NodeRenderer {
|
||||
r := &FootnoteHTMLRenderer{
|
||||
FootnoteConfig: NewFootnoteConfig(),
|
||||
}
|
||||
for _, opt := range opts {
|
||||
opt.SetFootnoteOption(&r.FootnoteConfig)
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// RegisterFuncs implements renderer.NodeRenderer.RegisterFuncs.
|
||||
func (r *FootnoteHTMLRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
|
||||
reg.Register(ast.KindFootnoteLink, r.renderFootnoteLink)
|
||||
reg.Register(ast.KindFootnoteBacklink, r.renderFootnoteBacklink)
|
||||
reg.Register(ast.KindFootnote, r.renderFootnote)
|
||||
reg.Register(ast.KindFootnoteList, r.renderFootnoteList)
|
||||
}
|
||||
|
||||
func (r *FootnoteHTMLRenderer) renderFootnoteLink(w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
|
||||
if entering {
|
||||
n := node.(*ast.FootnoteLink)
|
||||
is := strconv.Itoa(n.Index)
|
||||
_, _ = w.WriteString(`<sup id="`)
|
||||
_, _ = w.Write(r.idPrefix(node))
|
||||
_, _ = w.WriteString(`fnref`)
|
||||
if n.RefIndex > 0 {
|
||||
_, _ = w.WriteString(fmt.Sprintf("%v", n.RefIndex))
|
||||
}
|
||||
_ = w.WriteByte(':')
|
||||
_, _ = w.WriteString(is)
|
||||
_, _ = w.WriteString(`"><a href="#`)
|
||||
_, _ = w.Write(r.idPrefix(node))
|
||||
_, _ = w.WriteString(`fn:`)
|
||||
_, _ = w.WriteString(is)
|
||||
_, _ = w.WriteString(`" class="`)
|
||||
_, _ = w.Write(applyFootnoteTemplate(r.FootnoteConfig.LinkClass,
|
||||
n.Index, n.RefCount))
|
||||
if len(r.FootnoteConfig.LinkTitle) > 0 {
|
||||
_, _ = w.WriteString(`" title="`)
|
||||
_, _ = w.Write(util.EscapeHTML(applyFootnoteTemplate(r.FootnoteConfig.LinkTitle, n.Index, n.RefCount)))
|
||||
}
|
||||
_, _ = w.WriteString(`" role="doc-noteref">`)
|
||||
|
||||
_, _ = w.WriteString(is)
|
||||
_, _ = w.WriteString(`</a></sup>`)
|
||||
}
|
||||
return gast.WalkContinue, nil
|
||||
}
|
||||
|
||||
func (r *FootnoteHTMLRenderer) renderFootnoteBacklink(w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
|
||||
if entering {
|
||||
n := node.(*ast.FootnoteBacklink)
|
||||
is := strconv.Itoa(n.Index)
|
||||
_, _ = w.WriteString(` <a href="#`)
|
||||
_, _ = w.Write(r.idPrefix(node))
|
||||
_, _ = w.WriteString(`fnref`)
|
||||
if n.RefIndex > 0 {
|
||||
_, _ = w.WriteString(fmt.Sprintf("%v", n.RefIndex))
|
||||
}
|
||||
_ = w.WriteByte(':')
|
||||
_, _ = w.WriteString(is)
|
||||
_, _ = w.WriteString(`" class="`)
|
||||
_, _ = w.Write(applyFootnoteTemplate(r.FootnoteConfig.BacklinkClass, n.Index, n.RefCount))
|
||||
if len(r.FootnoteConfig.BacklinkTitle) > 0 {
|
||||
_, _ = w.WriteString(`" title="`)
|
||||
_, _ = w.Write(util.EscapeHTML(applyFootnoteTemplate(r.FootnoteConfig.BacklinkTitle, n.Index, n.RefCount)))
|
||||
}
|
||||
_, _ = w.WriteString(`" role="doc-backlink">`)
|
||||
_, _ = w.Write(applyFootnoteTemplate(r.FootnoteConfig.BacklinkHTML, n.Index, n.RefCount))
|
||||
_, _ = w.WriteString(`</a>`)
|
||||
}
|
||||
return gast.WalkContinue, nil
|
||||
}
|
||||
|
||||
func (r *FootnoteHTMLRenderer) renderFootnote(w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
|
||||
n := node.(*ast.Footnote)
|
||||
is := strconv.Itoa(n.Index)
|
||||
if entering {
|
||||
_, _ = w.WriteString(`<li id="`)
|
||||
_, _ = w.Write(r.idPrefix(node))
|
||||
_, _ = w.WriteString(`fn:`)
|
||||
_, _ = w.WriteString(is)
|
||||
_, _ = w.WriteString(`"`)
|
||||
if node.Attributes() != nil {
|
||||
html.RenderAttributes(w, node, html.ListItemAttributeFilter)
|
||||
}
|
||||
_, _ = w.WriteString(">\n")
|
||||
} else {
|
||||
_, _ = w.WriteString("</li>\n")
|
||||
}
|
||||
return gast.WalkContinue, nil
|
||||
}
|
||||
|
||||
func (r *FootnoteHTMLRenderer) renderFootnoteList(w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
|
||||
if entering {
|
||||
_, _ = w.WriteString(`<div class="footnotes" role="doc-endnotes"`)
|
||||
if node.Attributes() != nil {
|
||||
html.RenderAttributes(w, node, html.GlobalAttributeFilter)
|
||||
}
|
||||
_ = w.WriteByte('>')
|
||||
if r.Config.XHTML {
|
||||
_, _ = w.WriteString("\n<hr />\n")
|
||||
} else {
|
||||
_, _ = w.WriteString("\n<hr>\n")
|
||||
}
|
||||
_, _ = w.WriteString("<ol>\n")
|
||||
} else {
|
||||
_, _ = w.WriteString("</ol>\n")
|
||||
_, _ = w.WriteString("</div>\n")
|
||||
}
|
||||
return gast.WalkContinue, nil
|
||||
}
|
||||
|
||||
func (r *FootnoteHTMLRenderer) idPrefix(node gast.Node) []byte {
|
||||
if r.FootnoteConfig.IDPrefix != nil {
|
||||
return r.FootnoteConfig.IDPrefix
|
||||
}
|
||||
if r.FootnoteConfig.IDPrefixFunction != nil {
|
||||
return r.FootnoteConfig.IDPrefixFunction(node)
|
||||
}
|
||||
return []byte("")
|
||||
}
|
||||
|
||||
func applyFootnoteTemplate(b []byte, index, refCount int) []byte {
|
||||
fast := true
|
||||
for i, c := range b {
|
||||
if i != 0 {
|
||||
if b[i-1] == '^' && c == '^' {
|
||||
fast = false
|
||||
break
|
||||
}
|
||||
if b[i-1] == '%' && c == '%' {
|
||||
fast = false
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if fast {
|
||||
return b
|
||||
}
|
||||
is := []byte(strconv.Itoa(index))
|
||||
rs := []byte(strconv.Itoa(refCount))
|
||||
ret := bytes.Replace(b, []byte("^^"), is, -1)
|
||||
return bytes.Replace(ret, []byte("%%"), rs, -1)
|
||||
}
|
||||
|
||||
type footnote struct {
|
||||
options []FootnoteOption
|
||||
}
|
||||
|
||||
// Footnote is an extension that allows you to use PHP Markdown Extra Footnotes.
|
||||
var Footnote = &footnote{
|
||||
options: []FootnoteOption{},
|
||||
}
|
||||
|
||||
// NewFootnote returns a new extension with given options.
|
||||
func NewFootnote(opts ...FootnoteOption) goldmark.Extender {
|
||||
return &footnote{
|
||||
options: opts,
|
||||
}
|
||||
}
|
||||
|
||||
func (e *footnote) Extend(m goldmark.Markdown) {
|
||||
m.Parser().AddOptions(
|
||||
parser.WithBlockParsers(
|
||||
util.Prioritized(NewFootnoteBlockParser(), 999),
|
||||
),
|
||||
parser.WithInlineParsers(
|
||||
util.Prioritized(NewFootnoteParser(), 101),
|
||||
),
|
||||
parser.WithASTTransformers(
|
||||
util.Prioritized(NewFootnoteASTTransformer(), 999),
|
||||
),
|
||||
)
|
||||
m.Renderer().AddOptions(renderer.WithNodeRenderers(
|
||||
util.Prioritized(NewFootnoteHTMLRenderer(e.options...), 500),
|
||||
))
|
||||
}
|
|
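The footnote options above are plain functional options consumed by NewFootnoteConfig and the renderer. A minimal, hedged sketch of wiring a customized footnote extension; the sample text and the "alert-" prefix are illustrative, not part of this commit:

package main

import (
	"bytes"
	"fmt"

	"github.com/yuin/goldmark"
	"github.com/yuin/goldmark/extension"
)

func main() {
	src := []byte("Alerts are routed by the bot.[^1]\n\n[^1]: See the webhook handler.\n")
	md := goldmark.New(goldmark.WithExtensions(
		extension.NewFootnote(
			extension.WithFootnoteIDPrefix([]byte("alert-")),
			extension.WithFootnoteBacklinkHTML([]byte("↩")),
		),
	))
	var buf bytes.Buffer
	if err := md.Convert(src, &buf); err != nil {
		panic(err)
	}
	// Per renderFootnote above, ids come out prefixed, e.g. id="alert-fn:1".
	fmt.Print(buf.String())
}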
@ -0,0 +1,18 @@
|
|||
package extension
|
||||
|
||||
import (
|
||||
"github.com/yuin/goldmark"
|
||||
)
|
||||
|
||||
type gfm struct {
|
||||
}
|
||||
|
||||
// GFM is an extension that provides GitHub Flavored Markdown functionalities.
|
||||
var GFM = &gfm{}
|
||||
|
||||
func (e *gfm) Extend(m goldmark.Markdown) {
|
||||
Linkify.Extend(m)
|
||||
Table.Extend(m)
|
||||
Strikethrough.Extend(m)
|
||||
TaskList.Extend(m)
|
||||
}
|
|
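GFM is just a bundle of the four extenders it calls. A short, hedged sketch of enabling it in one step; the input text is illustrative:

package main

import (
	"bytes"
	"fmt"

	"github.com/yuin/goldmark"
	"github.com/yuin/goldmark/extension"
)

func main() {
	src := []byte("~~old~~ new, see www.example.com\n\n- [x] ship it\n")
	// GFM wires up Linkify, Table, Strikethrough and TaskList together.
	md := goldmark.New(goldmark.WithExtensions(extension.GFM))
	var buf bytes.Buffer
	if err := md.Convert(src, &buf); err != nil {
		panic(err)
	}
	fmt.Print(buf.String())
}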
@ -0,0 +1,318 @@
|
|||
package extension
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"regexp"
|
||||
|
||||
"github.com/yuin/goldmark"
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/parser"
|
||||
"github.com/yuin/goldmark/text"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
var wwwURLRegxp = regexp.MustCompile(`^www\.[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-z]+(?:[/#?][-a-zA-Z0-9@:%_\+.~#!?&/=\(\);,'">\^{}\[\]` + "`" + `]*)?`)
|
||||
|
||||
var urlRegexp = regexp.MustCompile(`^(?:http|https|ftp)://[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-z]+(?::\d+)?(?:[/#?][-a-zA-Z0-9@:%_+.~#$!?&/=\(\);,'">\^{}\[\]` + "`" + `]*)?`)
|
||||
|
||||
// A LinkifyConfig struct is a data structure that holds configuration of the
|
||||
// Linkify extension.
|
||||
type LinkifyConfig struct {
|
||||
AllowedProtocols [][]byte
|
||||
URLRegexp *regexp.Regexp
|
||||
WWWRegexp *regexp.Regexp
|
||||
EmailRegexp *regexp.Regexp
|
||||
}
|
||||
|
||||
const (
|
||||
optLinkifyAllowedProtocols parser.OptionName = "LinkifyAllowedProtocols"
|
||||
optLinkifyURLRegexp parser.OptionName = "LinkifyURLRegexp"
|
||||
optLinkifyWWWRegexp parser.OptionName = "LinkifyWWWRegexp"
|
||||
optLinkifyEmailRegexp parser.OptionName = "LinkifyEmailRegexp"
|
||||
)
|
||||
|
||||
// SetOption implements SetOptioner.
|
||||
func (c *LinkifyConfig) SetOption(name parser.OptionName, value interface{}) {
|
||||
switch name {
|
||||
case optLinkifyAllowedProtocols:
|
||||
c.AllowedProtocols = value.([][]byte)
|
||||
case optLinkifyURLRegexp:
|
||||
c.URLRegexp = value.(*regexp.Regexp)
|
||||
case optLinkifyWWWRegexp:
|
||||
c.WWWRegexp = value.(*regexp.Regexp)
|
||||
case optLinkifyEmailRegexp:
|
||||
c.EmailRegexp = value.(*regexp.Regexp)
|
||||
}
|
||||
}
|
||||
|
||||
// A LinkifyOption interface sets options for the Linkify extension.
|
||||
type LinkifyOption interface {
|
||||
parser.Option
|
||||
SetLinkifyOption(*LinkifyConfig)
|
||||
}
|
||||
|
||||
type withLinkifyAllowedProtocols struct {
|
||||
value [][]byte
|
||||
}
|
||||
|
||||
func (o *withLinkifyAllowedProtocols) SetParserOption(c *parser.Config) {
|
||||
c.Options[optLinkifyAllowedProtocols] = o.value
|
||||
}
|
||||
|
||||
func (o *withLinkifyAllowedProtocols) SetLinkifyOption(p *LinkifyConfig) {
|
||||
p.AllowedProtocols = o.value
|
||||
}
|
||||
|
||||
// WithLinkifyAllowedProtocols is a functional option that specifies allowed
|
||||
// protocols in autolinks. Each protocol must end with ':' like
|
||||
// 'http:' .
|
||||
func WithLinkifyAllowedProtocols(value [][]byte) LinkifyOption {
|
||||
return &withLinkifyAllowedProtocols{
|
||||
value: value,
|
||||
}
|
||||
}
|
||||
|
||||
type withLinkifyURLRegexp struct {
|
||||
value *regexp.Regexp
|
||||
}
|
||||
|
||||
func (o *withLinkifyURLRegexp) SetParserOption(c *parser.Config) {
|
||||
c.Options[optLinkifyURLRegexp] = o.value
|
||||
}
|
||||
|
||||
func (o *withLinkifyURLRegexp) SetLinkifyOption(p *LinkifyConfig) {
|
||||
p.URLRegexp = o.value
|
||||
}
|
||||
|
||||
// WithLinkifyURLRegexp is a functional option that specifies
|
||||
// a pattern of the URL including a protocol.
|
||||
func WithLinkifyURLRegexp(value *regexp.Regexp) LinkifyOption {
|
||||
return &withLinkifyURLRegexp{
|
||||
value: value,
|
||||
}
|
||||
}
|
||||
|
||||
// WithLinkifyWWWRegexp is a functional option that specifies
|
||||
// a pattern of the URL without a protocol.
|
||||
// This pattern must start with 'www.' .
|
||||
type withLinkifyWWWRegexp struct {
|
||||
value *regexp.Regexp
|
||||
}
|
||||
|
||||
func (o *withLinkifyWWWRegexp) SetParserOption(c *parser.Config) {
|
||||
c.Options[optLinkifyWWWRegexp] = o.value
|
||||
}
|
||||
|
||||
func (o *withLinkifyWWWRegexp) SetLinkifyOption(p *LinkifyConfig) {
|
||||
p.WWWRegexp = o.value
|
||||
}
|
||||
|
||||
func WithLinkifyWWWRegexp(value *regexp.Regexp) LinkifyOption {
|
||||
return &withLinkifyWWWRegexp{
|
||||
value: value,
|
||||
}
|
||||
}
|
||||
|
||||
// WithLinkifyEmailRegexp is a functional option that specifies
|
||||
// a pattern of the email address.
|
||||
type withLinkifyEmailRegexp struct {
|
||||
value *regexp.Regexp
|
||||
}
|
||||
|
||||
func (o *withLinkifyEmailRegexp) SetParserOption(c *parser.Config) {
|
||||
c.Options[optLinkifyEmailRegexp] = o.value
|
||||
}
|
||||
|
||||
func (o *withLinkifyEmailRegexp) SetLinkifyOption(p *LinkifyConfig) {
|
||||
p.EmailRegexp = o.value
|
||||
}
|
||||
|
||||
func WithLinkifyEmailRegexp(value *regexp.Regexp) LinkifyOption {
|
||||
return &withLinkifyEmailRegexp{
|
||||
value: value,
|
||||
}
|
||||
}
|
||||
|
||||
type linkifyParser struct {
|
||||
LinkifyConfig
|
||||
}
|
||||
|
||||
// NewLinkifyParser returns a new InlineParser that can parse
|
||||
// text that seems like a URL.
|
||||
func NewLinkifyParser(opts ...LinkifyOption) parser.InlineParser {
|
||||
p := &linkifyParser{
|
||||
LinkifyConfig: LinkifyConfig{
|
||||
AllowedProtocols: nil,
|
||||
URLRegexp: urlRegexp,
|
||||
WWWRegexp: wwwURLRegxp,
|
||||
},
|
||||
}
|
||||
for _, o := range opts {
|
||||
o.SetLinkifyOption(&p.LinkifyConfig)
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
func (s *linkifyParser) Trigger() []byte {
|
||||
// ' ' indicates any white space character or a line head
|
||||
return []byte{' ', '*', '_', '~', '('}
|
||||
}
|
||||
|
||||
var (
|
||||
protoHTTP = []byte("http:")
|
||||
protoHTTPS = []byte("https:")
|
||||
protoFTP = []byte("ftp:")
|
||||
domainWWW = []byte("www.")
|
||||
)
|
||||
|
||||
func (s *linkifyParser) Parse(parent ast.Node, block text.Reader, pc parser.Context) ast.Node {
|
||||
if pc.IsInLinkLabel() {
|
||||
return nil
|
||||
}
|
||||
line, segment := block.PeekLine()
|
||||
consumes := 0
|
||||
start := segment.Start
|
||||
c := line[0]
|
||||
// advance if current position is not a line head.
|
||||
if c == ' ' || c == '*' || c == '_' || c == '~' || c == '(' {
|
||||
consumes++
|
||||
start++
|
||||
line = line[1:]
|
||||
}
|
||||
|
||||
var m []int
|
||||
var protocol []byte
|
||||
var typ ast.AutoLinkType = ast.AutoLinkURL
|
||||
if s.LinkifyConfig.AllowedProtocols == nil {
|
||||
if bytes.HasPrefix(line, protoHTTP) || bytes.HasPrefix(line, protoHTTPS) || bytes.HasPrefix(line, protoFTP) {
|
||||
m = s.LinkifyConfig.URLRegexp.FindSubmatchIndex(line)
|
||||
}
|
||||
} else {
|
||||
for _, prefix := range s.LinkifyConfig.AllowedProtocols {
|
||||
if bytes.HasPrefix(line, prefix) {
|
||||
m = s.LinkifyConfig.URLRegexp.FindSubmatchIndex(line)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if m == nil && bytes.HasPrefix(line, domainWWW) {
|
||||
m = s.LinkifyConfig.WWWRegexp.FindSubmatchIndex(line)
|
||||
protocol = []byte("http")
|
||||
}
|
||||
if m != nil && m[0] != 0 {
|
||||
m = nil
|
||||
}
|
||||
if m != nil && m[0] == 0 {
|
||||
lastChar := line[m[1]-1]
|
||||
if lastChar == '.' {
|
||||
m[1]--
|
||||
} else if lastChar == ')' {
|
||||
closing := 0
|
||||
for i := m[1] - 1; i >= m[0]; i-- {
|
||||
if line[i] == ')' {
|
||||
closing++
|
||||
} else if line[i] == '(' {
|
||||
closing--
|
||||
}
|
||||
}
|
||||
if closing > 0 {
|
||||
m[1] -= closing
|
||||
}
|
||||
} else if lastChar == ';' {
|
||||
i := m[1] - 2
|
||||
for ; i >= m[0]; i-- {
|
||||
if util.IsAlphaNumeric(line[i]) {
|
||||
continue
|
||||
}
|
||||
break
|
||||
}
|
||||
if i != m[1]-2 {
|
||||
if line[i] == '&' {
|
||||
m[1] -= m[1] - i
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if m == nil {
|
||||
if len(line) > 0 && util.IsPunct(line[0]) {
|
||||
return nil
|
||||
}
|
||||
typ = ast.AutoLinkEmail
|
||||
stop := -1
|
||||
if s.LinkifyConfig.EmailRegexp == nil {
|
||||
stop = util.FindEmailIndex(line)
|
||||
} else {
|
||||
m := s.LinkifyConfig.EmailRegexp.FindSubmatchIndex(line)
|
||||
if m != nil && m[0] == 0 {
|
||||
stop = m[1]
|
||||
}
|
||||
}
|
||||
if stop < 0 {
|
||||
return nil
|
||||
}
|
||||
at := bytes.IndexByte(line, '@')
|
||||
m = []int{0, stop, at, stop - 1}
|
||||
if m == nil || bytes.IndexByte(line[m[2]:m[3]], '.') < 0 {
|
||||
return nil
|
||||
}
|
||||
lastChar := line[m[1]-1]
|
||||
if lastChar == '.' {
|
||||
m[1]--
|
||||
}
|
||||
if m[1] < len(line) {
|
||||
nextChar := line[m[1]]
|
||||
if nextChar == '-' || nextChar == '_' {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
if m == nil {
|
||||
return nil
|
||||
}
|
||||
if consumes != 0 {
|
||||
s := segment.WithStop(segment.Start + 1)
|
||||
ast.MergeOrAppendTextSegment(parent, s)
|
||||
}
|
||||
i := m[1] - 1
|
||||
for ; i > 0; i-- {
|
||||
c := line[i]
|
||||
switch c {
|
||||
case '?', '!', '.', ',', ':', '*', '_', '~':
|
||||
default:
|
||||
goto endfor
|
||||
}
|
||||
}
|
||||
endfor:
|
||||
i++
|
||||
consumes += i
|
||||
block.Advance(consumes)
|
||||
n := ast.NewTextSegment(text.NewSegment(start, start+i))
|
||||
link := ast.NewAutoLink(typ, n)
|
||||
link.Protocol = protocol
|
||||
return link
|
||||
}
|
||||
|
||||
func (s *linkifyParser) CloseBlock(parent ast.Node, pc parser.Context) {
|
||||
// nothing to do
|
||||
}
|
||||
|
||||
type linkify struct {
|
||||
options []LinkifyOption
|
||||
}
|
||||
|
||||
// Linkify is an extension that allows you to parse text that seems like a URL.
|
||||
var Linkify = &linkify{}
|
||||
|
||||
func NewLinkify(opts ...LinkifyOption) goldmark.Extender {
|
||||
return &linkify{
|
||||
options: opts,
|
||||
}
|
||||
}
|
||||
|
||||
func (e *linkify) Extend(m goldmark.Markdown) {
|
||||
m.Parser().AddOptions(
|
||||
parser.WithInlineParsers(
|
||||
util.Prioritized(NewLinkifyParser(e.options...), 999),
|
||||
),
|
||||
)
|
||||
}
|
|
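A hedged sketch of the Linkify options above: restricting AllowedProtocols changes which prefixes the URL regexp is even attempted on, so anything else stays plain text. The URLs in the input are illustrative only:

package main

import (
	"bytes"
	"fmt"

	"github.com/yuin/goldmark"
	"github.com/yuin/goldmark/extension"
)

func main() {
	src := []byte("Dashboard: https://grafana.example.com/d/abc ftp://files.example.com\n")
	md := goldmark.New(goldmark.WithExtensions(
		// Autolink only https: URLs; the ftp: URL is left untouched.
		extension.NewLinkify(
			extension.WithLinkifyAllowedProtocols([][]byte{[]byte("https:")}),
		),
	))
	var buf bytes.Buffer
	if err := md.Convert(src, &buf); err != nil {
		panic(err)
	}
	fmt.Print(buf.String())
}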
@ -0,0 +1,116 @@
|
|||
package extension
|
||||
|
||||
import (
|
||||
"github.com/yuin/goldmark"
|
||||
gast "github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/extension/ast"
|
||||
"github.com/yuin/goldmark/parser"
|
||||
"github.com/yuin/goldmark/renderer"
|
||||
"github.com/yuin/goldmark/renderer/html"
|
||||
"github.com/yuin/goldmark/text"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
type strikethroughDelimiterProcessor struct {
|
||||
}
|
||||
|
||||
func (p *strikethroughDelimiterProcessor) IsDelimiter(b byte) bool {
|
||||
return b == '~'
|
||||
}
|
||||
|
||||
func (p *strikethroughDelimiterProcessor) CanOpenCloser(opener, closer *parser.Delimiter) bool {
|
||||
return opener.Char == closer.Char
|
||||
}
|
||||
|
||||
func (p *strikethroughDelimiterProcessor) OnMatch(consumes int) gast.Node {
|
||||
return ast.NewStrikethrough()
|
||||
}
|
||||
|
||||
var defaultStrikethroughDelimiterProcessor = &strikethroughDelimiterProcessor{}
|
||||
|
||||
type strikethroughParser struct {
|
||||
}
|
||||
|
||||
var defaultStrikethroughParser = &strikethroughParser{}
|
||||
|
||||
// NewStrikethroughParser returns a new InlineParser that parses
|
||||
// strikethrough expressions.
|
||||
func NewStrikethroughParser() parser.InlineParser {
|
||||
return defaultStrikethroughParser
|
||||
}
|
||||
|
||||
func (s *strikethroughParser) Trigger() []byte {
|
||||
return []byte{'~'}
|
||||
}
|
||||
|
||||
func (s *strikethroughParser) Parse(parent gast.Node, block text.Reader, pc parser.Context) gast.Node {
|
||||
before := block.PrecendingCharacter()
|
||||
line, segment := block.PeekLine()
|
||||
node := parser.ScanDelimiter(line, before, 2, defaultStrikethroughDelimiterProcessor)
|
||||
if node == nil {
|
||||
return nil
|
||||
}
|
||||
node.Segment = segment.WithStop(segment.Start + node.OriginalLength)
|
||||
block.Advance(node.OriginalLength)
|
||||
pc.PushDelimiter(node)
|
||||
return node
|
||||
}
|
||||
|
||||
func (s *strikethroughParser) CloseBlock(parent gast.Node, pc parser.Context) {
|
||||
// nothing to do
|
||||
}
|
||||
|
||||
// StrikethroughHTMLRenderer is a renderer.NodeRenderer implementation that
|
||||
// renders Strikethrough nodes.
|
||||
type StrikethroughHTMLRenderer struct {
|
||||
html.Config
|
||||
}
|
||||
|
||||
// NewStrikethroughHTMLRenderer returns a new StrikethroughHTMLRenderer.
|
||||
func NewStrikethroughHTMLRenderer(opts ...html.Option) renderer.NodeRenderer {
|
||||
r := &StrikethroughHTMLRenderer{
|
||||
Config: html.NewConfig(),
|
||||
}
|
||||
for _, opt := range opts {
|
||||
opt.SetHTMLOption(&r.Config)
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// RegisterFuncs implements renderer.NodeRenderer.RegisterFuncs.
|
||||
func (r *StrikethroughHTMLRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
|
||||
reg.Register(ast.KindStrikethrough, r.renderStrikethrough)
|
||||
}
|
||||
|
||||
// StrikethroughAttributeFilter defines attribute names which del elements can have.
|
||||
var StrikethroughAttributeFilter = html.GlobalAttributeFilter
|
||||
|
||||
func (r *StrikethroughHTMLRenderer) renderStrikethrough(w util.BufWriter, source []byte, n gast.Node, entering bool) (gast.WalkStatus, error) {
|
||||
if entering {
|
||||
if n.Attributes() != nil {
|
||||
_, _ = w.WriteString("<del")
|
||||
html.RenderAttributes(w, n, StrikethroughAttributeFilter)
|
||||
_ = w.WriteByte('>')
|
||||
} else {
|
||||
_, _ = w.WriteString("<del>")
|
||||
}
|
||||
} else {
|
||||
_, _ = w.WriteString("</del>")
|
||||
}
|
||||
return gast.WalkContinue, nil
|
||||
}
|
||||
|
||||
type strikethrough struct {
|
||||
}
|
||||
|
||||
// Strikethrough is an extension that allows you to use strikethrough expressions like '~~text~~'.
|
||||
var Strikethrough = &strikethrough{}
|
||||
|
||||
func (e *strikethrough) Extend(m goldmark.Markdown) {
|
||||
m.Parser().AddOptions(parser.WithInlineParsers(
|
||||
util.Prioritized(NewStrikethroughParser(), 500),
|
||||
))
|
||||
m.Renderer().AddOptions(renderer.WithNodeRenderers(
|
||||
util.Prioritized(NewStrikethroughHTMLRenderer(), 500),
|
||||
))
|
||||
}
|
|
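A minimal, hedged sketch of the Strikethrough extension in use (the input sentence is illustrative):

package main

import (
	"bytes"
	"fmt"

	"github.com/yuin/goldmark"
	"github.com/yuin/goldmark/extension"
)

func main() {
	src := []byte("The alert is ~~firing~~ resolved.\n")
	md := goldmark.New(goldmark.WithExtensions(extension.Strikethrough))
	var buf bytes.Buffer
	if err := md.Convert(src, &buf); err != nil {
		panic(err)
	}
	// "~~firing~~" is rendered as <del>firing</del> by the renderer above.
	fmt.Print(buf.String())
}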
@ -0,0 +1,552 @@
|
|||
package extension
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"regexp"
|
||||
|
||||
"github.com/yuin/goldmark"
|
||||
gast "github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/extension/ast"
|
||||
"github.com/yuin/goldmark/parser"
|
||||
"github.com/yuin/goldmark/renderer"
|
||||
"github.com/yuin/goldmark/renderer/html"
|
||||
"github.com/yuin/goldmark/text"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
var escapedPipeCellListKey = parser.NewContextKey()
|
||||
|
||||
type escapedPipeCell struct {
|
||||
Cell *ast.TableCell
|
||||
Pos []int
|
||||
Transformed bool
|
||||
}
|
||||
|
||||
// TableCellAlignMethod indicates how table cells are aligned in HTML format.
|
||||
type TableCellAlignMethod int
|
||||
|
||||
const (
|
||||
// TableCellAlignDefault renders alignments by default method.
|
||||
// With XHTML, alignments are rendered as an align attribute.
|
||||
// With HTML5, alignments are rendered as a style attribute.
|
||||
TableCellAlignDefault TableCellAlignMethod = iota
|
||||
|
||||
// TableCellAlignAttribute renders alignments as an align attribute.
|
||||
TableCellAlignAttribute
|
||||
|
||||
// TableCellAlignStyle renders alignments as a style attribute.
|
||||
TableCellAlignStyle
|
||||
|
||||
// TableCellAlignNone does not care about alignments.
|
||||
// If you are using classes or other styles, you can add these attributes
|
||||
// in an ASTTransformer.
|
||||
TableCellAlignNone
|
||||
)
|
||||
|
||||
// TableConfig struct holds options for the extension.
|
||||
type TableConfig struct {
|
||||
html.Config
|
||||
|
||||
// TableCellAlignMethod indicates how table cells are aligned.
|
||||
TableCellAlignMethod TableCellAlignMethod
|
||||
}
|
||||
|
||||
// TableOption interface is a functional option interface for the extension.
|
||||
type TableOption interface {
|
||||
renderer.Option
|
||||
// SetTableOption sets given option to the extension.
|
||||
SetTableOption(*TableConfig)
|
||||
}
|
||||
|
||||
// NewTableConfig returns a new Config with defaults.
|
||||
func NewTableConfig() TableConfig {
|
||||
return TableConfig{
|
||||
Config: html.NewConfig(),
|
||||
TableCellAlignMethod: TableCellAlignDefault,
|
||||
}
|
||||
}
|
||||
|
||||
// SetOption implements renderer.SetOptioner.
|
||||
func (c *TableConfig) SetOption(name renderer.OptionName, value interface{}) {
|
||||
switch name {
|
||||
case optTableCellAlignMethod:
|
||||
c.TableCellAlignMethod = value.(TableCellAlignMethod)
|
||||
default:
|
||||
c.Config.SetOption(name, value)
|
||||
}
|
||||
}
|
||||
|
||||
type withTableHTMLOptions struct {
|
||||
value []html.Option
|
||||
}
|
||||
|
||||
func (o *withTableHTMLOptions) SetConfig(c *renderer.Config) {
|
||||
if o.value != nil {
|
||||
for _, v := range o.value {
|
||||
v.(renderer.Option).SetConfig(c)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (o *withTableHTMLOptions) SetTableOption(c *TableConfig) {
|
||||
if o.value != nil {
|
||||
for _, v := range o.value {
|
||||
v.SetHTMLOption(&c.Config)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// WithTableHTMLOptions is a functional option that wraps goldmark HTMLRenderer options.
|
||||
func WithTableHTMLOptions(opts ...html.Option) TableOption {
|
||||
return &withTableHTMLOptions{opts}
|
||||
}
|
||||
|
||||
const optTableCellAlignMethod renderer.OptionName = "TableTableCellAlignMethod"
|
||||
|
||||
type withTableCellAlignMethod struct {
|
||||
value TableCellAlignMethod
|
||||
}
|
||||
|
||||
func (o *withTableCellAlignMethod) SetConfig(c *renderer.Config) {
|
||||
c.Options[optTableCellAlignMethod] = o.value
|
||||
}
|
||||
|
||||
func (o *withTableCellAlignMethod) SetTableOption(c *TableConfig) {
|
||||
c.TableCellAlignMethod = o.value
|
||||
}
|
||||
|
||||
// WithTableCellAlignMethod is a functional option that indicates how table cells are aligned in HTML format.
|
||||
func WithTableCellAlignMethod(a TableCellAlignMethod) TableOption {
|
||||
return &withTableCellAlignMethod{a}
|
||||
}
|
||||
|
||||
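The alignment method option above is consumed by the table HTML renderer later in this file. A hedged sketch of setting it, assuming the package's NewTable constructor (not shown in this hunk) accepts TableOption values the same way NewFootnote accepts FootnoteOption values:

package main

import (
	"bytes"
	"fmt"

	"github.com/yuin/goldmark"
	"github.com/yuin/goldmark/extension"
)

func main() {
	src := []byte("| a | b |\n|:--|--:|\n| 1 | 2 |\n")
	md := goldmark.New(goldmark.WithExtensions(
		// NewTable is assumed here; TableCellAlignStyle should emit alignment
		// as a style attribute rather than an align attribute.
		extension.NewTable(
			extension.WithTableCellAlignMethod(extension.TableCellAlignStyle),
		),
	))
	var buf bytes.Buffer
	if err := md.Convert(src, &buf); err != nil {
		panic(err)
	}
	fmt.Print(buf.String())
}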
func isTableDelim(bs []byte) bool {
|
||||
for _, b := range bs {
|
||||
if !(util.IsSpace(b) || b == '-' || b == '|' || b == ':') {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
var tableDelimLeft = regexp.MustCompile(`^\s*\:\-+\s*$`)
|
||||
var tableDelimRight = regexp.MustCompile(`^\s*\-+\:\s*$`)
|
||||
var tableDelimCenter = regexp.MustCompile(`^\s*\:\-+\:\s*$`)
|
||||
var tableDelimNone = regexp.MustCompile(`^\s*\-+\s*$`)
|
||||
|
||||
type tableParagraphTransformer struct {
|
||||
}
|
||||
|
||||
var defaultTableParagraphTransformer = &tableParagraphTransformer{}
|
||||
|
||||
// NewTableParagraphTransformer returns a new ParagraphTransformer
|
||||
// that can transform paragraphs into tables.
|
||||
func NewTableParagraphTransformer() parser.ParagraphTransformer {
|
||||
return defaultTableParagraphTransformer
|
||||
}
|
||||
|
||||
func (b *tableParagraphTransformer) Transform(node *gast.Paragraph, reader text.Reader, pc parser.Context) {
|
||||
lines := node.Lines()
|
||||
if lines.Len() < 2 {
|
||||
return
|
||||
}
|
||||
for i := 1; i < lines.Len(); i++ {
|
||||
alignments := b.parseDelimiter(lines.At(i), reader)
|
||||
if alignments == nil {
|
||||
continue
|
||||
}
|
||||
header := b.parseRow(lines.At(i-1), alignments, true, reader, pc)
|
||||
if header == nil || len(alignments) != header.ChildCount() {
|
||||
return
|
||||
}
|
||||
table := ast.NewTable()
|
||||
table.Alignments = alignments
|
||||
table.AppendChild(table, ast.NewTableHeader(header))
|
||||
for j := i + 1; j < lines.Len(); j++ {
|
||||
table.AppendChild(table, b.parseRow(lines.At(j), alignments, false, reader, pc))
|
||||
}
|
||||
node.Lines().SetSliced(0, i-1)
|
||||
node.Parent().InsertAfter(node.Parent(), node, table)
|
||||
if node.Lines().Len() == 0 {
|
||||
node.Parent().RemoveChild(node.Parent(), node)
|
||||
} else {
|
||||
last := node.Lines().At(i - 2)
|
||||
last.Stop = last.Stop - 1 // trim last newline(\n)
|
||||
node.Lines().Set(i-2, last)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (b *tableParagraphTransformer) parseRow(segment text.Segment, alignments []ast.Alignment, isHeader bool, reader text.Reader, pc parser.Context) *ast.TableRow {
|
||||
source := reader.Source()
|
||||
line := segment.Value(source)
|
||||
pos := 0
|
||||
pos += util.TrimLeftSpaceLength(line)
|
||||
limit := len(line)
|
||||
limit -= util.TrimRightSpaceLength(line)
|
||||
row := ast.NewTableRow(alignments)
|
||||
if len(line) > 0 && line[pos] == '|' {
|
||||
pos++
|
||||
}
|
||||
if len(line) > 0 && line[limit-1] == '|' {
|
||||
limit--
|
||||
}
|
||||
i := 0
|
||||
for ; pos < limit; i++ {
|
||||
alignment := ast.AlignNone
|
||||
if i >= len(alignments) {
|
||||
if !isHeader {
|
||||
return row
|
||||
}
|
||||
} else {
|
||||
alignment = alignments[i]
|
||||
}
|
||||
|
||||
var escapedCell *escapedPipeCell
|
||||
node := ast.NewTableCell()
|
||||
node.Alignment = alignment
|
||||
hasBacktick := false
|
||||
closure := pos
|
||||
for ; closure < limit; closure++ {
|
||||
if line[closure] == '`' {
|
||||
hasBacktick = true
|
||||
}
|
||||
if line[closure] == '|' {
|
||||
if closure == 0 || line[closure-1] != '\\' {
|
||||
break
|
||||
} else if hasBacktick {
|
||||
if escapedCell == nil {
|
||||
escapedCell = &escapedPipeCell{node, []int{}, false}
|
||||
escapedList := pc.ComputeIfAbsent(escapedPipeCellListKey,
|
||||
func() interface{} {
|
||||
return []*escapedPipeCell{}
|
||||
}).([]*escapedPipeCell)
|
||||
escapedList = append(escapedList, escapedCell)
|
||||
pc.Set(escapedPipeCellListKey, escapedList)
|
||||
}
|
||||
escapedCell.Pos = append(escapedCell.Pos, segment.Start+closure-1)
|
||||
}
|
||||
}
|
||||
}
|
||||
seg := text.NewSegment(segment.Start+pos, segment.Start+closure)
|
||||
seg = seg.TrimLeftSpace(source)
|
||||
seg = seg.TrimRightSpace(source)
|
||||
node.Lines().Append(seg)
|
||||
row.AppendChild(row, node)
|
||||
pos = closure + 1
|
||||
}
|
||||
for ; i < len(alignments); i++ {
|
||||
row.AppendChild(row, ast.NewTableCell())
|
||||
}
|
||||
return row
|
||||
}
|
||||
|
||||
func (b *tableParagraphTransformer) parseDelimiter(segment text.Segment, reader text.Reader) []ast.Alignment {
|
||||
line := segment.Value(reader.Source())
|
||||
if !isTableDelim(line) {
|
||||
return nil
|
||||
}
|
||||
cols := bytes.Split(line, []byte{'|'})
|
||||
if util.IsBlank(cols[0]) {
|
||||
cols = cols[1:]
|
||||
}
|
||||
if len(cols) > 0 && util.IsBlank(cols[len(cols)-1]) {
|
||||
cols = cols[:len(cols)-1]
|
||||
}
|
||||
|
||||
var alignments []ast.Alignment
|
||||
for _, col := range cols {
|
||||
if tableDelimLeft.Match(col) {
|
||||
alignments = append(alignments, ast.AlignLeft)
|
||||
} else if tableDelimRight.Match(col) {
|
||||
alignments = append(alignments, ast.AlignRight)
|
||||
} else if tableDelimCenter.Match(col) {
|
||||
alignments = append(alignments, ast.AlignCenter)
|
||||
} else if tableDelimNone.Match(col) {
|
||||
alignments = append(alignments, ast.AlignNone)
|
||||
} else {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
return alignments
|
||||
}
|
||||
|
||||
type tableASTTransformer struct {
|
||||
}
|
||||
|
||||
var defaultTableASTTransformer = &tableASTTransformer{}
|
||||
|
||||
// NewTableASTTransformer returns a parser.ASTTransformer for tables.
|
||||
func NewTableASTTransformer() parser.ASTTransformer {
|
||||
return defaultTableASTTransformer
|
||||
}
|
||||
|
||||
func (a *tableASTTransformer) Transform(node *gast.Document, reader text.Reader, pc parser.Context) {
|
||||
lst := pc.Get(escapedPipeCellListKey)
|
||||
if lst == nil {
|
||||
return
|
||||
}
|
||||
pc.Set(escapedPipeCellListKey, nil)
|
||||
for _, v := range lst.([]*escapedPipeCell) {
|
||||
if v.Transformed {
|
||||
continue
|
||||
}
|
||||
_ = gast.Walk(v.Cell, func(n gast.Node, entering bool) (gast.WalkStatus, error) {
|
||||
if !entering || n.Kind() != gast.KindCodeSpan {
|
||||
return gast.WalkContinue, nil
|
||||
}
|
||||
|
||||
for c := n.FirstChild(); c != nil; {
|
||||
next := c.NextSibling()
|
||||
if c.Kind() != gast.KindText {
|
||||
c = next
|
||||
continue
|
||||
}
|
||||
parent := c.Parent()
|
||||
ts := &c.(*gast.Text).Segment
|
||||
n := c
|
||||
for _, v := range lst.([]*escapedPipeCell) {
|
||||
for _, pos := range v.Pos {
|
||||
if ts.Start <= pos && pos < ts.Stop {
|
||||
segment := n.(*gast.Text).Segment
|
||||
n1 := gast.NewRawTextSegment(segment.WithStop(pos))
|
||||
n2 := gast.NewRawTextSegment(segment.WithStart(pos + 1))
|
||||
parent.InsertAfter(parent, n, n1)
|
||||
parent.InsertAfter(parent, n1, n2)
|
||||
parent.RemoveChild(parent, n)
|
||||
n = n2
|
||||
v.Transformed = true
|
||||
}
|
||||
}
|
||||
}
|
||||
c = next
|
||||
}
|
||||
return gast.WalkContinue, nil
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// TableHTMLRenderer is a renderer.NodeRenderer implementation that
|
||||
// renders Table nodes.
|
||||
type TableHTMLRenderer struct {
|
||||
TableConfig
|
||||
}
|
||||
|
||||
// NewTableHTMLRenderer returns a new TableHTMLRenderer.
|
||||
func NewTableHTMLRenderer(opts ...TableOption) renderer.NodeRenderer {
|
||||
r := &TableHTMLRenderer{
|
||||
TableConfig: NewTableConfig(),
|
||||
}
|
||||
for _, opt := range opts {
|
||||
opt.SetTableOption(&r.TableConfig)
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// RegisterFuncs implements renderer.NodeRenderer.RegisterFuncs.
|
||||
func (r *TableHTMLRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
|
||||
reg.Register(ast.KindTable, r.renderTable)
|
||||
reg.Register(ast.KindTableHeader, r.renderTableHeader)
|
||||
reg.Register(ast.KindTableRow, r.renderTableRow)
|
||||
reg.Register(ast.KindTableCell, r.renderTableCell)
|
||||
}
|
||||
|
||||
// TableAttributeFilter defines attribute names which table elements can have.
|
||||
var TableAttributeFilter = html.GlobalAttributeFilter.Extend(
|
||||
[]byte("align"), // [Deprecated]
|
||||
[]byte("bgcolor"), // [Deprecated]
|
||||
[]byte("border"), // [Deprecated]
|
||||
[]byte("cellpadding"), // [Deprecated]
|
||||
[]byte("cellspacing"), // [Deprecated]
|
||||
[]byte("frame"), // [Deprecated]
|
||||
[]byte("rules"), // [Deprecated]
|
||||
[]byte("summary"), // [Deprecated]
|
||||
[]byte("width"), // [Deprecated]
|
||||
)
|
||||
|
||||
func (r *TableHTMLRenderer) renderTable(w util.BufWriter, source []byte, n gast.Node, entering bool) (gast.WalkStatus, error) {
|
||||
if entering {
|
||||
_, _ = w.WriteString("<table")
|
||||
if n.Attributes() != nil {
|
||||
html.RenderAttributes(w, n, TableAttributeFilter)
|
||||
}
|
||||
_, _ = w.WriteString(">\n")
|
||||
} else {
|
||||
_, _ = w.WriteString("</table>\n")
|
||||
}
|
||||
return gast.WalkContinue, nil
|
||||
}
|
||||
|
||||
// TableHeaderAttributeFilter defines attribute names which <thead> elements can have.
|
||||
var TableHeaderAttributeFilter = html.GlobalAttributeFilter.Extend(
|
||||
[]byte("align"), // [Deprecated since HTML4] [Obsolete since HTML5]
|
||||
[]byte("bgcolor"), // [Not Standardized]
|
||||
[]byte("char"), // [Deprecated since HTML4] [Obsolete since HTML5]
|
||||
[]byte("charoff"), // [Deprecated since HTML4] [Obsolete since HTML5]
|
||||
[]byte("valign"), // [Deprecated since HTML4] [Obsolete since HTML5]
|
||||
)
|
||||
|
||||
func (r *TableHTMLRenderer) renderTableHeader(w util.BufWriter, source []byte, n gast.Node, entering bool) (gast.WalkStatus, error) {
|
||||
if entering {
|
||||
_, _ = w.WriteString("<thead")
|
||||
if n.Attributes() != nil {
|
||||
html.RenderAttributes(w, n, TableHeaderAttributeFilter)
|
||||
}
|
||||
_, _ = w.WriteString(">\n")
|
||||
_, _ = w.WriteString("<tr>\n") // Header <tr> has no separate handle
|
||||
} else {
|
||||
_, _ = w.WriteString("</tr>\n")
|
||||
_, _ = w.WriteString("</thead>\n")
|
||||
if n.NextSibling() != nil {
|
||||
_, _ = w.WriteString("<tbody>\n")
|
||||
}
|
||||
}
|
||||
return gast.WalkContinue, nil
|
||||
}
|
||||
|
||||
// TableRowAttributeFilter defines attribute names which <tr> elements can have.
|
||||
var TableRowAttributeFilter = html.GlobalAttributeFilter.Extend(
|
||||
[]byte("align"), // [Obsolete since HTML5]
|
||||
[]byte("bgcolor"), // [Obsolete since HTML5]
|
||||
[]byte("char"), // [Obsolete since HTML5]
|
||||
[]byte("charoff"), // [Obsolete since HTML5]
|
||||
[]byte("valign"), // [Obsolete since HTML5]
|
||||
)
|
||||
|
||||
func (r *TableHTMLRenderer) renderTableRow(w util.BufWriter, source []byte, n gast.Node, entering bool) (gast.WalkStatus, error) {
|
||||
if entering {
|
||||
_, _ = w.WriteString("<tr")
|
||||
if n.Attributes() != nil {
|
||||
html.RenderAttributes(w, n, TableRowAttributeFilter)
|
||||
}
|
||||
_, _ = w.WriteString(">\n")
|
||||
} else {
|
||||
_, _ = w.WriteString("</tr>\n")
|
||||
if n.Parent().LastChild() == n {
|
||||
_, _ = w.WriteString("</tbody>\n")
|
||||
}
|
||||
}
|
||||
return gast.WalkContinue, nil
|
||||
}
|
||||
|
||||
// TableThCellAttributeFilter defines attribute names which table <th> cells can have.
|
||||
var TableThCellAttributeFilter = html.GlobalAttributeFilter.Extend(
|
||||
[]byte("abbr"), // [OK] Contains a short abbreviated description of the cell's content [NOT OK in <td>]
|
||||
|
||||
[]byte("align"), // [Obsolete since HTML5]
|
||||
[]byte("axis"), // [Obsolete since HTML5]
|
||||
[]byte("bgcolor"), // [Not Standardized]
|
||||
[]byte("char"), // [Obsolete since HTML5]
|
||||
[]byte("charoff"), // [Obsolete since HTML5]
|
||||
|
||||
[]byte("colspan"), // [OK] Number of columns that the cell is to span
|
||||
[]byte("headers"), // [OK] This attribute contains a list of space-separated strings, each corresponding to the id attribute of the <th> elements that apply to this element
|
||||
|
||||
[]byte("height"), // [Deprecated since HTML4] [Obsolete since HTML5]
|
||||
|
||||
[]byte("rowspan"), // [OK] Number of rows that the cell is to span
|
||||
[]byte("scope"), // [OK] This enumerated attribute defines the cells that the header (defined in the <th>) element relates to [NOT OK in <td>]
|
||||
|
||||
[]byte("valign"), // [Obsolete since HTML5]
|
||||
[]byte("width"), // [Deprecated since HTML4] [Obsolete since HTML5]
|
||||
)
|
||||
|
||||
// TableTdCellAttributeFilter defines attribute names which table <td> cells can have.
|
||||
var TableTdCellAttributeFilter = html.GlobalAttributeFilter.Extend(
|
||||
[]byte("abbr"), // [Obsolete since HTML5] [OK in <th>]
|
||||
[]byte("align"), // [Obsolete since HTML5]
|
||||
[]byte("axis"), // [Obsolete since HTML5]
|
||||
[]byte("bgcolor"), // [Not Standardized]
|
||||
[]byte("char"), // [Obsolete since HTML5]
|
||||
[]byte("charoff"), // [Obsolete since HTML5]
|
||||
|
||||
[]byte("colspan"), // [OK] Number of columns that the cell is to span
|
||||
[]byte("headers"), // [OK] This attribute contains a list of space-separated strings, each corresponding to the id attribute of the <th> elements that apply to this element
|
||||
|
||||
[]byte("height"), // [Deprecated since HTML4] [Obsolete since HTML5]
|
||||
|
||||
[]byte("rowspan"), // [OK] Number of rows that the cell is to span
|
||||
|
||||
[]byte("scope"), // [Obsolete since HTML5] [OK in <th>]
|
||||
[]byte("valign"), // [Obsolete since HTML5]
|
||||
[]byte("width"), // [Deprecated since HTML4] [Obsolete since HTML5]
|
||||
)
|
||||
|
||||
func (r *TableHTMLRenderer) renderTableCell(w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
|
||||
n := node.(*ast.TableCell)
|
||||
tag := "td"
|
||||
if n.Parent().Kind() == ast.KindTableHeader {
|
||||
tag = "th"
|
||||
}
|
||||
if entering {
|
||||
fmt.Fprintf(w, "<%s", tag)
|
||||
if n.Alignment != ast.AlignNone {
|
||||
amethod := r.TableConfig.TableCellAlignMethod
|
||||
if amethod == TableCellAlignDefault {
|
||||
if r.Config.XHTML {
|
||||
amethod = TableCellAlignAttribute
|
||||
} else {
|
||||
amethod = TableCellAlignStyle
|
||||
}
|
||||
}
|
||||
switch amethod {
|
||||
case TableCellAlignAttribute:
|
||||
if _, ok := n.AttributeString("align"); !ok { // Skip align render if overridden
|
||||
fmt.Fprintf(w, ` align="%s"`, n.Alignment.String())
|
||||
}
|
||||
case TableCellAlignStyle:
|
||||
v, ok := n.AttributeString("style")
|
||||
var cob util.CopyOnWriteBuffer
|
||||
if ok {
|
||||
cob = util.NewCopyOnWriteBuffer(v.([]byte))
|
||||
cob.AppendByte(';')
|
||||
}
|
||||
style := fmt.Sprintf("text-align:%s", n.Alignment.String())
|
||||
cob.AppendString(style)
|
||||
n.SetAttributeString("style", cob.Bytes())
|
||||
}
|
||||
}
|
||||
if n.Attributes() != nil {
|
||||
if tag == "td" {
|
||||
html.RenderAttributes(w, n, TableTdCellAttributeFilter) // <td>
|
||||
} else {
|
||||
html.RenderAttributes(w, n, TableThCellAttributeFilter) // <th>
|
||||
}
|
||||
}
|
||||
_ = w.WriteByte('>')
|
||||
} else {
|
||||
fmt.Fprintf(w, "</%s>\n", tag)
|
||||
}
|
||||
return gast.WalkContinue, nil
|
||||
}
|
||||
|
||||
type table struct {
|
||||
options []TableOption
|
||||
}
|
||||
|
||||
// Table is an extension that allows you to use GFM tables.
|
||||
var Table = &table{
|
||||
options: []TableOption{},
|
||||
}
|
||||
|
||||
// NewTable returns a new extension with given options.
|
||||
func NewTable(opts ...TableOption) goldmark.Extender {
|
||||
return &table{
|
||||
options: opts,
|
||||
}
|
||||
}
|
||||
|
||||
func (e *table) Extend(m goldmark.Markdown) {
|
||||
m.Parser().AddOptions(
|
||||
parser.WithParagraphTransformers(
|
||||
util.Prioritized(NewTableParagraphTransformer(), 200),
|
||||
),
|
||||
parser.WithASTTransformers(
|
||||
util.Prioritized(defaultTableASTTransformer, 0),
|
||||
),
|
||||
)
|
||||
m.Renderer().AddOptions(renderer.WithNodeRenderers(
|
||||
util.Prioritized(NewTableHTMLRenderer(e.options...), 500),
|
||||
))
|
||||
}
|
|
@ -0,0 +1,115 @@
package extension

import (
	"github.com/yuin/goldmark"
	gast "github.com/yuin/goldmark/ast"
	"github.com/yuin/goldmark/extension/ast"
	"github.com/yuin/goldmark/parser"
	"github.com/yuin/goldmark/renderer"
	"github.com/yuin/goldmark/renderer/html"
	"github.com/yuin/goldmark/text"
	"github.com/yuin/goldmark/util"
	"regexp"
)

var taskListRegexp = regexp.MustCompile(`^\[([\sxX])\]\s*`)

type taskCheckBoxParser struct {
}

var defaultTaskCheckBoxParser = &taskCheckBoxParser{}

// NewTaskCheckBoxParser returns a new InlineParser that can parse
// checkboxes in list items.
// This parser must take precedence over the parser.LinkParser.
func NewTaskCheckBoxParser() parser.InlineParser {
	return defaultTaskCheckBoxParser
}

func (s *taskCheckBoxParser) Trigger() []byte {
	return []byte{'['}
}

func (s *taskCheckBoxParser) Parse(parent gast.Node, block text.Reader, pc parser.Context) gast.Node {
	// Given AST structure must be like
	// - List
	//   - ListItem    : parent.Parent
	//     - TextBlock : parent
	//       (current line)
	if parent.Parent() == nil || parent.Parent().FirstChild() != parent {
		return nil
	}

	if _, ok := parent.Parent().(*gast.ListItem); !ok {
		return nil
	}
	line, _ := block.PeekLine()
	m := taskListRegexp.FindSubmatchIndex(line)
	if m == nil {
		return nil
	}
	value := line[m[2]:m[3]][0]
	block.Advance(m[1])
	checked := value == 'x' || value == 'X'
	return ast.NewTaskCheckBox(checked)
}

func (s *taskCheckBoxParser) CloseBlock(parent gast.Node, pc parser.Context) {
	// nothing to do
}

// TaskCheckBoxHTMLRenderer is a renderer.NodeRenderer implementation that
// renders checkboxes in list items.
type TaskCheckBoxHTMLRenderer struct {
	html.Config
}

// NewTaskCheckBoxHTMLRenderer returns a new TaskCheckBoxHTMLRenderer.
func NewTaskCheckBoxHTMLRenderer(opts ...html.Option) renderer.NodeRenderer {
	r := &TaskCheckBoxHTMLRenderer{
		Config: html.NewConfig(),
	}
	for _, opt := range opts {
		opt.SetHTMLOption(&r.Config)
	}
	return r
}

// RegisterFuncs implements renderer.NodeRenderer.RegisterFuncs.
func (r *TaskCheckBoxHTMLRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
	reg.Register(ast.KindTaskCheckBox, r.renderTaskCheckBox)
}

func (r *TaskCheckBoxHTMLRenderer) renderTaskCheckBox(w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
	if !entering {
		return gast.WalkContinue, nil
	}
	n := node.(*ast.TaskCheckBox)

	if n.IsChecked {
		w.WriteString(`<input checked="" disabled="" type="checkbox"`)
	} else {
		w.WriteString(`<input disabled="" type="checkbox"`)
	}
	if r.XHTML {
		w.WriteString(" /> ")
	} else {
		w.WriteString("> ")
	}
	return gast.WalkContinue, nil
}

type taskList struct {
}

// TaskList is an extension that allows you to use GFM task lists.
var TaskList = &taskList{}

func (e *taskList) Extend(m goldmark.Markdown) {
	m.Parser().AddOptions(parser.WithInlineParsers(
		util.Prioritized(NewTaskCheckBoxParser(), 0),
	))
	m.Renderer().AddOptions(renderer.WithNodeRenderers(
		util.Prioritized(NewTaskCheckBoxHTMLRenderer(), 500),
	))
}
@ -0,0 +1,339 @@
|
|||
package extension
|
||||
|
||||
import (
|
||||
"unicode"
|
||||
|
||||
"github.com/yuin/goldmark"
|
||||
gast "github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/parser"
|
||||
"github.com/yuin/goldmark/text"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
var uncloseCounterKey = parser.NewContextKey()
|
||||
|
||||
type unclosedCounter struct {
|
||||
Single int
|
||||
Double int
|
||||
}
|
||||
|
||||
func (u *unclosedCounter) Reset() {
|
||||
u.Single = 0
|
||||
u.Double = 0
|
||||
}
|
||||
|
||||
func getUnclosedCounter(pc parser.Context) *unclosedCounter {
|
||||
v := pc.Get(uncloseCounterKey)
|
||||
if v == nil {
|
||||
v = &unclosedCounter{}
|
||||
pc.Set(uncloseCounterKey, v)
|
||||
}
|
||||
return v.(*unclosedCounter)
|
||||
}
|
||||
|
||||
// TypographicPunctuation is a key of the punctuations that can be replaced with
|
||||
// typographic entities.
|
||||
type TypographicPunctuation int
|
||||
|
||||
const (
|
||||
// LeftSingleQuote is '
|
||||
LeftSingleQuote TypographicPunctuation = iota + 1
|
||||
// RightSingleQuote is '
|
||||
RightSingleQuote
|
||||
// LeftDoubleQuote is "
|
||||
LeftDoubleQuote
|
||||
// RightDoubleQuote is "
|
||||
RightDoubleQuote
|
||||
// EnDash is --
|
||||
EnDash
|
||||
// EmDash is ---
|
||||
EmDash
|
||||
// Ellipsis is ...
|
||||
Ellipsis
|
||||
// LeftAngleQuote is <<
|
||||
LeftAngleQuote
|
||||
// RightAngleQuote is >>
|
||||
RightAngleQuote
|
||||
// Apostrophe is '
|
||||
Apostrophe
|
||||
|
||||
typographicPunctuationMax
|
||||
)
|
||||
|
||||
// A TypographerConfig struct is a data structure that holds configuration of the
|
||||
// Typographer extension.
|
||||
type TypographerConfig struct {
|
||||
Substitutions [][]byte
|
||||
}
|
||||
|
||||
func newDefaultSubstitutions() [][]byte {
|
||||
replacements := make([][]byte, typographicPunctuationMax)
|
||||
replacements[LeftSingleQuote] = []byte("‘")
|
||||
replacements[RightSingleQuote] = []byte("’")
|
||||
replacements[LeftDoubleQuote] = []byte("“")
|
||||
replacements[RightDoubleQuote] = []byte("”")
|
||||
replacements[EnDash] = []byte("–")
|
||||
replacements[EmDash] = []byte("—")
|
||||
replacements[Ellipsis] = []byte("…")
|
||||
replacements[LeftAngleQuote] = []byte("«")
|
||||
replacements[RightAngleQuote] = []byte("»")
|
||||
replacements[Apostrophe] = []byte("’")
|
||||
|
||||
return replacements
|
||||
}
|
||||
|
||||
// SetOption implements SetOptioner.
|
||||
func (b *TypographerConfig) SetOption(name parser.OptionName, value interface{}) {
|
||||
switch name {
|
||||
case optTypographicSubstitutions:
|
||||
b.Substitutions = value.([][]byte)
|
||||
}
|
||||
}
|
||||
|
||||
// A TypographerOption interface sets options for the TypographerParser.
|
||||
type TypographerOption interface {
|
||||
parser.Option
|
||||
SetTypographerOption(*TypographerConfig)
|
||||
}
|
||||
|
||||
const optTypographicSubstitutions parser.OptionName = "TypographicSubstitutions"
|
||||
|
||||
// TypographicSubstitutions is a list of the substitutions for the Typographer extension.
|
||||
type TypographicSubstitutions map[TypographicPunctuation][]byte
|
||||
|
||||
type withTypographicSubstitutions struct {
|
||||
value [][]byte
|
||||
}
|
||||
|
||||
func (o *withTypographicSubstitutions) SetParserOption(c *parser.Config) {
|
||||
c.Options[optTypographicSubstitutions] = o.value
|
||||
}
|
||||
|
||||
func (o *withTypographicSubstitutions) SetTypographerOption(p *TypographerConfig) {
|
||||
p.Substitutions = o.value
|
||||
}
|
||||
|
||||
// WithTypographicSubstitutions is a functional option that specifies replacement text
|
||||
// for punctuations.
|
||||
func WithTypographicSubstitutions(values map[TypographicPunctuation][]byte) TypographerOption {
|
||||
replacements := newDefaultSubstitutions()
|
||||
for k, v := range values {
|
||||
replacements[k] = v
|
||||
}
|
||||
|
||||
return &withTypographicSubstitutions{replacements}
|
||||
}
|
||||
|
||||
type typographerDelimiterProcessor struct {
|
||||
}
|
||||
|
||||
func (p *typographerDelimiterProcessor) IsDelimiter(b byte) bool {
|
||||
return b == '\'' || b == '"'
|
||||
}
|
||||
|
||||
func (p *typographerDelimiterProcessor) CanOpenCloser(opener, closer *parser.Delimiter) bool {
|
||||
return opener.Char == closer.Char
|
||||
}
|
||||
|
||||
func (p *typographerDelimiterProcessor) OnMatch(consumes int) gast.Node {
|
||||
return nil
|
||||
}
|
||||
|
||||
var defaultTypographerDelimiterProcessor = &typographerDelimiterProcessor{}
|
||||
|
||||
type typographerParser struct {
|
||||
TypographerConfig
|
||||
}
|
||||
|
||||
// NewTypographerParser returns a new InlineParser that parses
|
||||
// typographer expressions.
|
||||
func NewTypographerParser(opts ...TypographerOption) parser.InlineParser {
|
||||
p := &typographerParser{
|
||||
TypographerConfig: TypographerConfig{
|
||||
Substitutions: newDefaultSubstitutions(),
|
||||
},
|
||||
}
|
||||
for _, o := range opts {
|
||||
o.SetTypographerOption(&p.TypographerConfig)
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
func (s *typographerParser) Trigger() []byte {
|
||||
return []byte{'\'', '"', '-', '.', ',', '<', '>', '*', '['}
|
||||
}
|
||||
|
||||
func (s *typographerParser) Parse(parent gast.Node, block text.Reader, pc parser.Context) gast.Node {
|
||||
line, _ := block.PeekLine()
|
||||
c := line[0]
|
||||
if len(line) > 2 {
|
||||
if c == '-' {
|
||||
if s.Substitutions[EmDash] != nil && line[1] == '-' && line[2] == '-' { // ---
|
||||
node := gast.NewString(s.Substitutions[EmDash])
|
||||
node.SetCode(true)
|
||||
block.Advance(3)
|
||||
return node
|
||||
}
|
||||
} else if c == '.' {
|
||||
if s.Substitutions[Ellipsis] != nil && line[1] == '.' && line[2] == '.' { // ...
|
||||
node := gast.NewString(s.Substitutions[Ellipsis])
|
||||
node.SetCode(true)
|
||||
block.Advance(3)
|
||||
return node
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
if len(line) > 1 {
|
||||
if c == '<' {
|
||||
if s.Substitutions[LeftAngleQuote] != nil && line[1] == '<' { // <<
|
||||
node := gast.NewString(s.Substitutions[LeftAngleQuote])
|
||||
node.SetCode(true)
|
||||
block.Advance(2)
|
||||
return node
|
||||
}
|
||||
return nil
|
||||
} else if c == '>' {
|
||||
if s.Substitutions[RightAngleQuote] != nil && line[1] == '>' { // >>
|
||||
node := gast.NewString(s.Substitutions[RightAngleQuote])
|
||||
node.SetCode(true)
|
||||
block.Advance(2)
|
||||
return node
|
||||
}
|
||||
return nil
|
||||
} else if s.Substitutions[EnDash] != nil && c == '-' && line[1] == '-' { // --
|
||||
node := gast.NewString(s.Substitutions[EnDash])
|
||||
node.SetCode(true)
|
||||
block.Advance(2)
|
||||
return node
|
||||
}
|
||||
}
|
||||
if c == '\'' || c == '"' {
|
||||
before := block.PrecendingCharacter()
|
||||
d := parser.ScanDelimiter(line, before, 1, defaultTypographerDelimiterProcessor)
|
||||
if d == nil {
|
||||
return nil
|
||||
}
|
||||
counter := getUnclosedCounter(pc)
|
||||
if c == '\'' {
|
||||
if s.Substitutions[Apostrophe] != nil {
|
||||
// Handle decade abbreviations such as '90s
|
||||
if d.CanOpen && !d.CanClose && len(line) > 3 && util.IsNumeric(line[1]) && util.IsNumeric(line[2]) && line[3] == 's' {
|
||||
after := rune(' ')
|
||||
if len(line) > 4 {
|
||||
after = util.ToRune(line, 4)
|
||||
}
|
||||
if len(line) == 3 || util.IsSpaceRune(after) || util.IsPunctRune(after) {
|
||||
node := gast.NewString(s.Substitutions[Apostrophe])
|
||||
node.SetCode(true)
|
||||
block.Advance(1)
|
||||
return node
|
||||
}
|
||||
}
|
||||
// special cases: 'twas, 'em, 'net
|
||||
if len(line) > 1 && (unicode.IsPunct(before) || unicode.IsSpace(before)) && (line[1] == 't' || line[1] == 'e' || line[1] == 'n' || line[1] == 'l') {
|
||||
node := gast.NewString(s.Substitutions[Apostrophe])
|
||||
node.SetCode(true)
|
||||
block.Advance(1)
|
||||
return node
|
||||
}
|
||||
// Convert normal apostrophes. This is probably more flexible than necessary but
|
||||
// converts any apostrophe in between two alphanumerics.
|
||||
if len(line) > 1 && (unicode.IsDigit(before) || unicode.IsLetter(before)) && (unicode.IsLetter(util.ToRune(line, 1))) {
|
||||
node := gast.NewString(s.Substitutions[Apostrophe])
|
||||
node.SetCode(true)
|
||||
block.Advance(1)
|
||||
return node
|
||||
}
|
||||
}
|
||||
if s.Substitutions[LeftSingleQuote] != nil && d.CanOpen && !d.CanClose {
|
||||
nt := LeftSingleQuote
|
||||
// special cases: Alice's, I'm, Don't, You'd
|
||||
if len(line) > 1 && (line[1] == 's' || line[1] == 'm' || line[1] == 't' || line[1] == 'd') && (len(line) < 3 || util.IsPunct(line[2]) || util.IsSpace(line[2])) {
|
||||
nt = RightSingleQuote
|
||||
}
|
||||
// special cases: I've, I'll, You're
|
||||
if len(line) > 2 && ((line[1] == 'v' && line[2] == 'e') || (line[1] == 'l' && line[2] == 'l') || (line[1] == 'r' && line[2] == 'e')) && (len(line) < 4 || util.IsPunct(line[3]) || util.IsSpace(line[3])) {
|
||||
nt = RightSingleQuote
|
||||
}
|
||||
if nt == LeftSingleQuote {
|
||||
counter.Single++
|
||||
}
|
||||
|
||||
node := gast.NewString(s.Substitutions[nt])
|
||||
node.SetCode(true)
|
||||
block.Advance(1)
|
||||
return node
|
||||
}
|
||||
if s.Substitutions[RightSingleQuote] != nil {
|
||||
// plural possessives and abbreviations: Smiths', doin'
|
||||
if len(line) > 1 && unicode.IsSpace(util.ToRune(line, 0)) || unicode.IsPunct(util.ToRune(line, 0)) && (len(line) > 2 && !unicode.IsDigit(util.ToRune(line, 1))) {
|
||||
node := gast.NewString(s.Substitutions[RightSingleQuote])
|
||||
node.SetCode(true)
|
||||
block.Advance(1)
|
||||
return node
|
||||
}
|
||||
}
|
||||
if s.Substitutions[RightSingleQuote] != nil && counter.Single > 0 {
|
||||
isClose := d.CanClose && !d.CanOpen
|
||||
maybeClose := d.CanClose && d.CanOpen && len(line) > 1 && unicode.IsPunct(util.ToRune(line, 1)) && (len(line) == 2 || (len(line) > 2 && util.IsPunct(line[2]) || util.IsSpace(line[2])))
|
||||
if isClose || maybeClose {
|
||||
node := gast.NewString(s.Substitutions[RightSingleQuote])
|
||||
node.SetCode(true)
|
||||
block.Advance(1)
|
||||
counter.Single--
|
||||
return node
|
||||
}
|
||||
}
|
||||
}
|
||||
if c == '"' {
|
||||
if s.Substitutions[LeftDoubleQuote] != nil && d.CanOpen && !d.CanClose {
|
||||
node := gast.NewString(s.Substitutions[LeftDoubleQuote])
|
||||
node.SetCode(true)
|
||||
block.Advance(1)
|
||||
counter.Double++
|
||||
return node
|
||||
}
|
||||
if s.Substitutions[RightDoubleQuote] != nil && counter.Double > 0 {
|
||||
isClose := d.CanClose && !d.CanOpen
|
||||
maybeClose := d.CanClose && d.CanOpen && len(line) > 1 && (unicode.IsPunct(util.ToRune(line, 1))) && (len(line) == 2 || (len(line) > 2 && util.IsPunct(line[2]) || util.IsSpace(line[2])))
|
||||
if isClose || maybeClose {
|
||||
// special case: "Monitor 21""
|
||||
if len(line) > 1 && line[1] == '"' && unicode.IsDigit(before) {
|
||||
return nil
|
||||
}
|
||||
node := gast.NewString(s.Substitutions[RightDoubleQuote])
|
||||
node.SetCode(true)
|
||||
block.Advance(1)
|
||||
counter.Double--
|
||||
return node
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *typographerParser) CloseBlock(parent gast.Node, pc parser.Context) {
|
||||
getUnclosedCounter(pc).Reset()
|
||||
}
|
||||
|
||||
type typographer struct {
|
||||
options []TypographerOption
|
||||
}
|
||||
|
||||
// Typographer is an extension that replaces punctuations with typographic entities.
|
||||
var Typographer = &typographer{}
|
||||
|
||||
// NewTypographer returns a new Extender that replaces punctuations with typographic entities.
|
||||
func NewTypographer(opts ...TypographerOption) goldmark.Extender {
|
||||
return &typographer{
|
||||
options: opts,
|
||||
}
|
||||
}
|
||||
|
||||
func (e *typographer) Extend(m goldmark.Markdown) {
|
||||
m.Parser().AddOptions(parser.WithInlineParsers(
|
||||
util.Prioritized(NewTypographerParser(e.options...), 9999),
|
||||
))
|
||||
}
|
|
@ -0,0 +1,140 @@
// Package goldmark implements functions to convert markdown text to a desired format.
package goldmark

import (
	"github.com/yuin/goldmark/parser"
	"github.com/yuin/goldmark/renderer"
	"github.com/yuin/goldmark/renderer/html"
	"github.com/yuin/goldmark/text"
	"github.com/yuin/goldmark/util"
	"io"
)

// DefaultParser returns a new Parser that is configured by default values.
func DefaultParser() parser.Parser {
	return parser.NewParser(parser.WithBlockParsers(parser.DefaultBlockParsers()...),
		parser.WithInlineParsers(parser.DefaultInlineParsers()...),
		parser.WithParagraphTransformers(parser.DefaultParagraphTransformers()...),
	)
}

// DefaultRenderer returns a new Renderer that is configured by default values.
func DefaultRenderer() renderer.Renderer {
	return renderer.NewRenderer(renderer.WithNodeRenderers(util.Prioritized(html.NewRenderer(), 1000)))
}

var defaultMarkdown = New()

// Convert interprets a UTF-8 bytes source in Markdown and
// writes rendered contents to a writer w.
func Convert(source []byte, w io.Writer, opts ...parser.ParseOption) error {
	return defaultMarkdown.Convert(source, w, opts...)
}

// A Markdown interface offers functions to convert Markdown text to
// a desired format.
type Markdown interface {
	// Convert interprets a UTF-8 bytes source in Markdown and writes rendered
	// contents to a writer w.
	Convert(source []byte, writer io.Writer, opts ...parser.ParseOption) error

	// Parser returns a Parser that will be used for conversion.
	Parser() parser.Parser

	// SetParser sets a Parser to this object.
	SetParser(parser.Parser)

	// Renderer returns a Renderer that will be used for conversion.
	Renderer() renderer.Renderer

	// SetRenderer sets a Renderer to this object.
	SetRenderer(renderer.Renderer)
}

// Option is a functional option type for Markdown objects.
type Option func(*markdown)

// WithExtensions adds extensions.
func WithExtensions(ext ...Extender) Option {
	return func(m *markdown) {
		m.extensions = append(m.extensions, ext...)
	}
}

// WithParser allows you to override the default parser.
func WithParser(p parser.Parser) Option {
	return func(m *markdown) {
		m.parser = p
	}
}

// WithParserOptions applies options for the parser.
func WithParserOptions(opts ...parser.Option) Option {
	return func(m *markdown) {
		m.parser.AddOptions(opts...)
	}
}

// WithRenderer allows you to override the default renderer.
func WithRenderer(r renderer.Renderer) Option {
	return func(m *markdown) {
		m.renderer = r
	}
}

// WithRendererOptions applies options for the renderer.
func WithRendererOptions(opts ...renderer.Option) Option {
	return func(m *markdown) {
		m.renderer.AddOptions(opts...)
	}
}

type markdown struct {
	parser     parser.Parser
	renderer   renderer.Renderer
	extensions []Extender
}

// New returns a new Markdown with given options.
func New(options ...Option) Markdown {
	md := &markdown{
		parser:     DefaultParser(),
		renderer:   DefaultRenderer(),
		extensions: []Extender{},
	}
	for _, opt := range options {
		opt(md)
	}
	for _, e := range md.extensions {
		e.Extend(md)
	}
	return md
}

func (m *markdown) Convert(source []byte, writer io.Writer, opts ...parser.ParseOption) error {
	reader := text.NewReader(source)
	doc := m.parser.Parse(reader, opts...)
	return m.renderer.Render(writer, source, doc)
}

func (m *markdown) Parser() parser.Parser {
	return m.parser
}

func (m *markdown) SetParser(v parser.Parser) {
	m.parser = v
}

func (m *markdown) Renderer() renderer.Renderer {
	return m.renderer
}

func (m *markdown) SetRenderer(v renderer.Renderer) {
	m.renderer = v
}

// An Extender interface is used for extending Markdown.
type Extender interface {
	// Extend extends the Markdown.
	Extend(Markdown)
}
@ -0,0 +1,328 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"strconv"
|
||||
|
||||
"github.com/yuin/goldmark/text"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
var attrNameID = []byte("id")
|
||||
var attrNameClass = []byte("class")
|
||||
|
||||
// An Attribute is an attribute of the markdown elements
|
||||
type Attribute struct {
|
||||
Name []byte
|
||||
Value interface{}
|
||||
}
|
||||
|
||||
// An Attributes is a collection of attributes.
|
||||
type Attributes []Attribute
|
||||
|
||||
// Find returns (value, true) if an attribute corresponding to the given name is found, otherwise (nil, false).
|
||||
func (as Attributes) Find(name []byte) (interface{}, bool) {
|
||||
for _, a := range as {
|
||||
if bytes.Equal(a.Name, name) {
|
||||
return a.Value, true
|
||||
}
|
||||
}
|
||||
return nil, false
|
||||
}
|
||||
|
||||
func (as Attributes) findUpdate(name []byte, cb func(v interface{}) interface{}) bool {
|
||||
for i, a := range as {
|
||||
if bytes.Equal(a.Name, name) {
|
||||
as[i].Value = cb(a.Value)
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// ParseAttributes parses attributes into a map.
|
||||
// ParseAttributes returns the parsed attributes and true if it could parse
|
||||
// attributes, otherwise nil and false.
|
||||
func ParseAttributes(reader text.Reader) (Attributes, bool) {
|
||||
savedLine, savedPosition := reader.Position()
|
||||
reader.SkipSpaces()
|
||||
if reader.Peek() != '{' {
|
||||
reader.SetPosition(savedLine, savedPosition)
|
||||
return nil, false
|
||||
}
|
||||
reader.Advance(1)
|
||||
attrs := Attributes{}
|
||||
for {
|
||||
if reader.Peek() == '}' {
|
||||
reader.Advance(1)
|
||||
return attrs, true
|
||||
}
|
||||
attr, ok := parseAttribute(reader)
|
||||
if !ok {
|
||||
reader.SetPosition(savedLine, savedPosition)
|
||||
return nil, false
|
||||
}
|
||||
if bytes.Equal(attr.Name, attrNameClass) {
|
||||
if !attrs.findUpdate(attrNameClass, func(v interface{}) interface{} {
|
||||
ret := make([]byte, 0, len(v.([]byte))+1+len(attr.Value.([]byte)))
|
||||
ret = append(ret, v.([]byte)...)
|
||||
return append(append(ret, ' '), attr.Value.([]byte)...)
|
||||
}) {
|
||||
attrs = append(attrs, attr)
|
||||
}
|
||||
} else {
|
||||
attrs = append(attrs, attr)
|
||||
}
|
||||
reader.SkipSpaces()
|
||||
if reader.Peek() == ',' {
|
||||
reader.Advance(1)
|
||||
reader.SkipSpaces()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func parseAttribute(reader text.Reader) (Attribute, bool) {
|
||||
reader.SkipSpaces()
|
||||
c := reader.Peek()
|
||||
if c == '#' || c == '.' {
|
||||
reader.Advance(1)
|
||||
line, _ := reader.PeekLine()
|
||||
i := 0
|
||||
// HTML5 allows any kind of characters as id, but XHTML restricts characters for id.
|
||||
// CommonMark is basically defined for XHTML (even though it is legacy).
|
||||
// So we restrict id characters.
|
||||
for ; i < len(line) && !util.IsSpace(line[i]) &&
|
||||
(!util.IsPunct(line[i]) || line[i] == '_' || line[i] == '-' || line[i] == ':' || line[i] == '.'); i++ {
|
||||
}
|
||||
name := attrNameClass
|
||||
if c == '#' {
|
||||
name = attrNameID
|
||||
}
|
||||
reader.Advance(i)
|
||||
return Attribute{Name: name, Value: line[0:i]}, true
|
||||
}
|
||||
line, _ := reader.PeekLine()
|
||||
if len(line) == 0 {
|
||||
return Attribute{}, false
|
||||
}
|
||||
c = line[0]
|
||||
if !((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') ||
|
||||
c == '_' || c == ':') {
|
||||
return Attribute{}, false
|
||||
}
|
||||
i := 0
|
||||
for ; i < len(line); i++ {
|
||||
c = line[i]
|
||||
if !((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') ||
|
||||
(c >= '0' && c <= '9') ||
|
||||
c == '_' || c == ':' || c == '.' || c == '-') {
|
||||
break
|
||||
}
|
||||
}
|
||||
name := line[:i]
|
||||
reader.Advance(i)
|
||||
reader.SkipSpaces()
|
||||
c = reader.Peek()
|
||||
if c != '=' {
|
||||
return Attribute{}, false
|
||||
}
|
||||
reader.Advance(1)
|
||||
reader.SkipSpaces()
|
||||
value, ok := parseAttributeValue(reader)
|
||||
if !ok {
|
||||
return Attribute{}, false
|
||||
}
|
||||
if bytes.Equal(name, attrNameClass) {
|
||||
if _, ok = value.([]byte); !ok {
|
||||
return Attribute{}, false
|
||||
}
|
||||
}
|
||||
return Attribute{Name: name, Value: value}, true
|
||||
}
|
||||
|
||||
func parseAttributeValue(reader text.Reader) (interface{}, bool) {
|
||||
reader.SkipSpaces()
|
||||
c := reader.Peek()
|
||||
var value interface{}
|
||||
ok := false
|
||||
switch c {
|
||||
case text.EOF:
|
||||
return Attribute{}, false
|
||||
case '{':
|
||||
value, ok = ParseAttributes(reader)
|
||||
case '[':
|
||||
value, ok = parseAttributeArray(reader)
|
||||
case '"':
|
||||
value, ok = parseAttributeString(reader)
|
||||
default:
|
||||
if c == '-' || c == '+' || util.IsNumeric(c) {
|
||||
value, ok = parseAttributeNumber(reader)
|
||||
} else {
|
||||
value, ok = parseAttributeOthers(reader)
|
||||
}
|
||||
}
|
||||
if !ok {
|
||||
return nil, false
|
||||
}
|
||||
return value, true
|
||||
}
|
||||
|
||||
func parseAttributeArray(reader text.Reader) ([]interface{}, bool) {
|
||||
reader.Advance(1) // skip [
|
||||
ret := []interface{}{}
|
||||
for i := 0; ; i++ {
|
||||
c := reader.Peek()
|
||||
comma := false
|
||||
if i != 0 && c == ',' {
|
||||
reader.Advance(1)
|
||||
comma = true
|
||||
}
|
||||
if c == ']' {
|
||||
if !comma {
|
||||
reader.Advance(1)
|
||||
return ret, true
|
||||
}
|
||||
return nil, false
|
||||
}
|
||||
reader.SkipSpaces()
|
||||
value, ok := parseAttributeValue(reader)
|
||||
if !ok {
|
||||
return nil, false
|
||||
}
|
||||
ret = append(ret, value)
|
||||
reader.SkipSpaces()
|
||||
}
|
||||
}
|
||||
|
||||
func parseAttributeString(reader text.Reader) ([]byte, bool) {
|
||||
reader.Advance(1) // skip "
|
||||
line, _ := reader.PeekLine()
|
||||
i := 0
|
||||
l := len(line)
|
||||
var buf bytes.Buffer
|
||||
for i < l {
|
||||
c := line[i]
|
||||
if c == '\\' && i != l-1 {
|
||||
n := line[i+1]
|
||||
switch n {
|
||||
case '"', '/', '\\':
|
||||
buf.WriteByte(n)
|
||||
i += 2
|
||||
case 'b':
|
||||
buf.WriteString("\b")
|
||||
i += 2
|
||||
case 'f':
|
||||
buf.WriteString("\f")
|
||||
i += 2
|
||||
case 'n':
|
||||
buf.WriteString("\n")
|
||||
i += 2
|
||||
case 'r':
|
||||
buf.WriteString("\r")
|
||||
i += 2
|
||||
case 't':
|
||||
buf.WriteString("\t")
|
||||
i += 2
|
||||
default:
|
||||
buf.WriteByte('\\')
|
||||
i++
|
||||
}
|
||||
continue
|
||||
}
|
||||
if c == '"' {
|
||||
reader.Advance(i + 1)
|
||||
return buf.Bytes(), true
|
||||
}
|
||||
buf.WriteByte(c)
|
||||
i++
|
||||
}
|
||||
return nil, false
|
||||
}
|
||||
|
||||
func scanAttributeDecimal(reader text.Reader, w io.ByteWriter) {
|
||||
for {
|
||||
c := reader.Peek()
|
||||
if util.IsNumeric(c) {
|
||||
w.WriteByte(c)
|
||||
} else {
|
||||
return
|
||||
}
|
||||
reader.Advance(1)
|
||||
}
|
||||
}
|
||||
|
||||
func parseAttributeNumber(reader text.Reader) (float64, bool) {
|
||||
sign := 1
|
||||
c := reader.Peek()
|
||||
if c == '-' {
|
||||
sign = -1
|
||||
reader.Advance(1)
|
||||
} else if c == '+' {
|
||||
reader.Advance(1)
|
||||
}
|
||||
var buf bytes.Buffer
|
||||
if !util.IsNumeric(reader.Peek()) {
|
||||
return 0, false
|
||||
}
|
||||
scanAttributeDecimal(reader, &buf)
|
||||
if buf.Len() == 0 {
|
||||
return 0, false
|
||||
}
|
||||
c = reader.Peek()
|
||||
if c == '.' {
|
||||
buf.WriteByte(c)
|
||||
reader.Advance(1)
|
||||
scanAttributeDecimal(reader, &buf)
|
||||
}
|
||||
c = reader.Peek()
|
||||
if c == 'e' || c == 'E' {
|
||||
buf.WriteByte(c)
|
||||
reader.Advance(1)
|
||||
c = reader.Peek()
|
||||
if c == '-' || c == '+' {
|
||||
buf.WriteByte(c)
|
||||
reader.Advance(1)
|
||||
}
|
||||
scanAttributeDecimal(reader, &buf)
|
||||
}
|
||||
f, err := strconv.ParseFloat(buf.String(), 10)
|
||||
if err != nil {
|
||||
return 0, false
|
||||
}
|
||||
return float64(sign) * f, true
|
||||
}
|
||||
|
||||
var bytesTrue = []byte("true")
|
||||
var bytesFalse = []byte("false")
|
||||
var bytesNull = []byte("null")
|
||||
|
||||
func parseAttributeOthers(reader text.Reader) (interface{}, bool) {
|
||||
line, _ := reader.PeekLine()
|
||||
c := line[0]
|
||||
if !((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') ||
|
||||
c == '_' || c == ':') {
|
||||
return nil, false
|
||||
}
|
||||
i := 0
|
||||
for ; i < len(line); i++ {
|
||||
c := line[i]
|
||||
if !((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') ||
|
||||
(c >= '0' && c <= '9') ||
|
||||
c == '_' || c == ':' || c == '.' || c == '-') {
|
||||
break
|
||||
}
|
||||
}
|
||||
value := line[:i]
|
||||
reader.Advance(i)
|
||||
if bytes.Equal(value, bytesTrue) {
|
||||
return true, true
|
||||
}
|
||||
if bytes.Equal(value, bytesFalse) {
|
||||
return false, true
|
||||
}
|
||||
if bytes.Equal(value, bytesNull) {
|
||||
return nil, true
|
||||
}
|
||||
return value, true
|
||||
}
|
|
@ -0,0 +1,246 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/text"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
// A HeadingConfig struct is a data structure that holds configuration of the renderers related to headings.
|
||||
type HeadingConfig struct {
|
||||
AutoHeadingID bool
|
||||
Attribute bool
|
||||
}
|
||||
|
||||
// SetOption implements SetOptioner.
|
||||
func (b *HeadingConfig) SetOption(name OptionName, value interface{}) {
|
||||
switch name {
|
||||
case optAutoHeadingID:
|
||||
b.AutoHeadingID = true
|
||||
case optAttribute:
|
||||
b.Attribute = true
|
||||
}
|
||||
}
|
||||
|
||||
// A HeadingOption interface sets options for heading parsers.
|
||||
type HeadingOption interface {
|
||||
Option
|
||||
SetHeadingOption(*HeadingConfig)
|
||||
}
|
||||
|
||||
// AutoHeadingID is an option name that enables auto IDs for headings.
|
||||
const optAutoHeadingID OptionName = "AutoHeadingID"
|
||||
|
||||
type withAutoHeadingID struct {
|
||||
}
|
||||
|
||||
func (o *withAutoHeadingID) SetParserOption(c *Config) {
|
||||
c.Options[optAutoHeadingID] = true
|
||||
}
|
||||
|
||||
func (o *withAutoHeadingID) SetHeadingOption(p *HeadingConfig) {
|
||||
p.AutoHeadingID = true
|
||||
}
|
||||
|
||||
// WithAutoHeadingID is a functional option that enables custom heading ids and
|
||||
// auto generated heading ids.
|
||||
func WithAutoHeadingID() HeadingOption {
|
||||
return &withAutoHeadingID{}
|
||||
}
|
||||
|
||||
type withHeadingAttribute struct {
|
||||
Option
|
||||
}
|
||||
|
||||
func (o *withHeadingAttribute) SetHeadingOption(p *HeadingConfig) {
|
||||
p.Attribute = true
|
||||
}
|
||||
|
||||
// WithHeadingAttribute is a functional option that enables custom heading attributes.
|
||||
func WithHeadingAttribute() HeadingOption {
|
||||
return &withHeadingAttribute{WithAttribute()}
|
||||
}
|
||||
|
||||
type atxHeadingParser struct {
|
||||
HeadingConfig
|
||||
}
|
||||
|
||||
// NewATXHeadingParser returns a new BlockParser that can parse ATX headings.
|
||||
func NewATXHeadingParser(opts ...HeadingOption) BlockParser {
|
||||
p := &atxHeadingParser{}
|
||||
for _, o := range opts {
|
||||
o.SetHeadingOption(&p.HeadingConfig)
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
func (b *atxHeadingParser) Trigger() []byte {
|
||||
return []byte{'#'}
|
||||
}
|
||||
|
||||
func (b *atxHeadingParser) Open(parent ast.Node, reader text.Reader, pc Context) (ast.Node, State) {
|
||||
line, segment := reader.PeekLine()
|
||||
pos := pc.BlockOffset()
|
||||
if pos < 0 {
|
||||
return nil, NoChildren
|
||||
}
|
||||
i := pos
|
||||
for ; i < len(line) && line[i] == '#'; i++ {
|
||||
}
|
||||
level := i - pos
|
||||
if i == pos || level > 6 {
|
||||
return nil, NoChildren
|
||||
}
|
||||
if i == len(line) { // alone '#' (without a new line character)
|
||||
return ast.NewHeading(level), NoChildren
|
||||
}
|
||||
l := util.TrimLeftSpaceLength(line[i:])
|
||||
if l == 0 {
|
||||
return nil, NoChildren
|
||||
}
|
||||
start := i + l
|
||||
if start >= len(line) {
|
||||
start = len(line) - 1
|
||||
}
|
||||
origstart := start
|
||||
stop := len(line) - util.TrimRightSpaceLength(line)
|
||||
|
||||
node := ast.NewHeading(level)
|
||||
parsed := false
|
||||
if b.Attribute { // handles special case like ### heading ### {#id}
|
||||
start--
|
||||
closureClose := -1
|
||||
closureOpen := -1
|
||||
for j := start; j < stop; {
|
||||
c := line[j]
|
||||
if util.IsEscapedPunctuation(line, j) {
|
||||
j += 2
|
||||
} else if util.IsSpace(c) && j < stop-1 && line[j+1] == '#' {
|
||||
closureOpen = j + 1
|
||||
k := j + 1
|
||||
for ; k < stop && line[k] == '#'; k++ {
|
||||
}
|
||||
closureClose = k
|
||||
break
|
||||
} else {
|
||||
j++
|
||||
}
|
||||
}
|
||||
if closureClose > 0 {
|
||||
reader.Advance(closureClose)
|
||||
attrs, ok := ParseAttributes(reader)
|
||||
rest, _ := reader.PeekLine()
|
||||
parsed = ok && util.IsBlank(rest)
|
||||
if parsed {
|
||||
for _, attr := range attrs {
|
||||
node.SetAttribute(attr.Name, attr.Value)
|
||||
}
|
||||
node.Lines().Append(text.NewSegment(segment.Start+start+1-segment.Padding, segment.Start+closureOpen-segment.Padding))
|
||||
}
|
||||
}
|
||||
}
|
||||
if !parsed {
|
||||
start = origstart
|
||||
stop := len(line) - util.TrimRightSpaceLength(line)
|
||||
if stop <= start { // empty headings like '##[space]'
|
||||
stop = start
|
||||
} else {
|
||||
i = stop - 1
|
||||
for ; line[i] == '#' && i >= start; i-- {
|
||||
}
|
||||
if i != stop-1 && !util.IsSpace(line[i]) {
|
||||
i = stop - 1
|
||||
}
|
||||
i++
|
||||
stop = i
|
||||
}
|
||||
|
||||
if len(util.TrimRight(line[start:stop], []byte{'#'})) != 0 { // empty heading like '### ###'
|
||||
node.Lines().Append(text.NewSegment(segment.Start+start-segment.Padding, segment.Start+stop-segment.Padding))
|
||||
}
|
||||
}
|
||||
return node, NoChildren
|
||||
}
|
||||
|
||||
func (b *atxHeadingParser) Continue(node ast.Node, reader text.Reader, pc Context) State {
|
||||
return Close
|
||||
}
|
||||
|
||||
func (b *atxHeadingParser) Close(node ast.Node, reader text.Reader, pc Context) {
|
||||
if b.Attribute {
|
||||
_, ok := node.AttributeString("id")
|
||||
if !ok {
|
||||
parseLastLineAttributes(node, reader, pc)
|
||||
}
|
||||
}
|
||||
|
||||
if b.AutoHeadingID {
|
||||
id, ok := node.AttributeString("id")
|
||||
if !ok {
|
||||
generateAutoHeadingID(node.(*ast.Heading), reader, pc)
|
||||
} else {
|
||||
pc.IDs().Put(id.([]byte))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (b *atxHeadingParser) CanInterruptParagraph() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (b *atxHeadingParser) CanAcceptIndentedLine() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func generateAutoHeadingID(node *ast.Heading, reader text.Reader, pc Context) {
|
||||
var line []byte
|
||||
lastIndex := node.Lines().Len() - 1
|
||||
if lastIndex > -1 {
|
||||
lastLine := node.Lines().At(lastIndex)
|
||||
line = lastLine.Value(reader.Source())
|
||||
}
|
||||
headingID := pc.IDs().Generate(line, ast.KindHeading)
|
||||
node.SetAttribute(attrNameID, headingID)
|
||||
}
|
||||
|
||||
func parseLastLineAttributes(node ast.Node, reader text.Reader, pc Context) {
|
||||
lastIndex := node.Lines().Len() - 1
|
||||
if lastIndex < 0 { // empty headings
|
||||
return
|
||||
}
|
||||
lastLine := node.Lines().At(lastIndex)
|
||||
line := lastLine.Value(reader.Source())
|
||||
lr := text.NewReader(line)
|
||||
var attrs Attributes
|
||||
var ok bool
|
||||
var start text.Segment
|
||||
var sl int
|
||||
var end text.Segment
|
||||
for {
|
||||
c := lr.Peek()
|
||||
if c == text.EOF {
|
||||
break
|
||||
}
|
||||
if c == '\\' {
|
||||
lr.Advance(1)
|
||||
if lr.Peek() == '{' {
|
||||
lr.Advance(1)
|
||||
}
|
||||
continue
|
||||
}
|
||||
if c == '{' {
|
||||
sl, start = lr.Position()
|
||||
attrs, ok = ParseAttributes(lr)
|
||||
_, end = lr.Position()
|
||||
lr.SetPosition(sl, start)
|
||||
}
|
||||
lr.Advance(1)
|
||||
}
|
||||
if ok && util.IsBlank(line[end.Start:]) {
|
||||
for _, attr := range attrs {
|
||||
node.SetAttribute(attr.Name, attr.Value)
|
||||
}
|
||||
lastLine.Stop = lastLine.Start + start.Start
|
||||
node.Lines().Set(lastIndex, lastLine)
|
||||
}
|
||||
}
|
|
@ -0,0 +1,42 @@
package parser

import (
	"github.com/yuin/goldmark/ast"
	"github.com/yuin/goldmark/text"
	"github.com/yuin/goldmark/util"
)

type autoLinkParser struct {
}

var defaultAutoLinkParser = &autoLinkParser{}

// NewAutoLinkParser returns a new InlineParser that parses autolinks
// surrounded by '<' and '>'.
func NewAutoLinkParser() InlineParser {
	return defaultAutoLinkParser
}

func (s *autoLinkParser) Trigger() []byte {
	return []byte{'<'}
}

func (s *autoLinkParser) Parse(parent ast.Node, block text.Reader, pc Context) ast.Node {
	line, segment := block.PeekLine()
	stop := util.FindEmailIndex(line[1:])
	typ := ast.AutoLinkType(ast.AutoLinkEmail)
	if stop < 0 {
		stop = util.FindURLIndex(line[1:])
		typ = ast.AutoLinkURL
	}
	if stop < 0 {
		return nil
	}
	stop++
	if stop >= len(line) || line[stop] != '>' {
		return nil
	}
	value := ast.NewTextSegment(text.NewSegment(segment.Start+1, segment.Start+stop))
	block.Advance(stop + 1)
	return ast.NewAutoLink(typ, value)
}
@ -0,0 +1,69 @@
package parser

import (
	"github.com/yuin/goldmark/ast"
	"github.com/yuin/goldmark/text"
	"github.com/yuin/goldmark/util"
)

type blockquoteParser struct {
}

var defaultBlockquoteParser = &blockquoteParser{}

// NewBlockquoteParser returns a new BlockParser that
// parses blockquotes.
func NewBlockquoteParser() BlockParser {
	return defaultBlockquoteParser
}

func (b *blockquoteParser) process(reader text.Reader) bool {
	line, _ := reader.PeekLine()
	w, pos := util.IndentWidth(line, reader.LineOffset())
	if w > 3 || pos >= len(line) || line[pos] != '>' {
		return false
	}
	pos++
	if pos >= len(line) || line[pos] == '\n' {
		reader.Advance(pos)
		return true
	}
	if line[pos] == ' ' || line[pos] == '\t' {
		pos++
	}
	reader.Advance(pos)
	if line[pos-1] == '\t' {
		reader.SetPadding(2)
	}
	return true
}

func (b *blockquoteParser) Trigger() []byte {
	return []byte{'>'}
}

func (b *blockquoteParser) Open(parent ast.Node, reader text.Reader, pc Context) (ast.Node, State) {
	if b.process(reader) {
		return ast.NewBlockquote(), HasChildren
	}
	return nil, NoChildren
}

func (b *blockquoteParser) Continue(node ast.Node, reader text.Reader, pc Context) State {
	if b.process(reader) {
		return Continue | HasChildren
	}
	return Close
}

func (b *blockquoteParser) Close(node ast.Node, reader text.Reader, pc Context) {
	// nothing to do
}

func (b *blockquoteParser) CanInterruptParagraph() bool {
	return true
}

func (b *blockquoteParser) CanAcceptIndentedLine() bool {
	return false
}
@ -0,0 +1,100 @@
package parser

import (
	"github.com/yuin/goldmark/ast"
	"github.com/yuin/goldmark/text"
	"github.com/yuin/goldmark/util"
)

type codeBlockParser struct {
}

// CodeBlockParser is a BlockParser implementation that parses indented code blocks.
var defaultCodeBlockParser = &codeBlockParser{}

// NewCodeBlockParser returns a new BlockParser that
// parses code blocks.
func NewCodeBlockParser() BlockParser {
	return defaultCodeBlockParser
}

func (b *codeBlockParser) Trigger() []byte {
	return nil
}

func (b *codeBlockParser) Open(parent ast.Node, reader text.Reader, pc Context) (ast.Node, State) {
	line, segment := reader.PeekLine()
	pos, padding := util.IndentPosition(line, reader.LineOffset(), 4)
	if pos < 0 || util.IsBlank(line) {
		return nil, NoChildren
	}
	node := ast.NewCodeBlock()
	reader.AdvanceAndSetPadding(pos, padding)
	_, segment = reader.PeekLine()
	// if code block line starts with a tab, keep a tab as it is.
	if segment.Padding != 0 {
		preserveLeadingTabInCodeBlock(&segment, reader, 0)
	}
	node.Lines().Append(segment)
	reader.Advance(segment.Len() - 1)
	return node, NoChildren
}

func (b *codeBlockParser) Continue(node ast.Node, reader text.Reader, pc Context) State {
	line, segment := reader.PeekLine()
	if util.IsBlank(line) {
		node.Lines().Append(segment.TrimLeftSpaceWidth(4, reader.Source()))
		return Continue | NoChildren
	}
	pos, padding := util.IndentPosition(line, reader.LineOffset(), 4)
	if pos < 0 {
		return Close
	}
	reader.AdvanceAndSetPadding(pos, padding)
	_, segment = reader.PeekLine()

	// if code block line starts with a tab, keep a tab as it is.
	if segment.Padding != 0 {
		preserveLeadingTabInCodeBlock(&segment, reader, 0)
	}

	node.Lines().Append(segment)
	reader.Advance(segment.Len() - 1)
	return Continue | NoChildren
}

func (b *codeBlockParser) Close(node ast.Node, reader text.Reader, pc Context) {
	// trim trailing blank lines
	lines := node.Lines()
	length := lines.Len() - 1
	source := reader.Source()
	for length >= 0 {
		line := lines.At(length)
		if util.IsBlank(line.Value(source)) {
			length--
		} else {
			break
		}
	}
	lines.SetSliced(0, length+1)
}

func (b *codeBlockParser) CanInterruptParagraph() bool {
	return false
}

func (b *codeBlockParser) CanAcceptIndentedLine() bool {
	return true
}

func preserveLeadingTabInCodeBlock(segment *text.Segment, reader text.Reader, indent int) {
	offsetWithPadding := reader.LineOffset() + indent
	sl, ss := reader.Position()
	reader.SetPosition(sl, text.NewSegment(ss.Start-1, ss.Stop))
	if offsetWithPadding == reader.LineOffset() {
		segment.Padding = 0
		segment.Start--
	}
	reader.SetPosition(sl, ss)
}
@ -0,0 +1,84 @@
package parser

import (
	"github.com/yuin/goldmark/ast"
	"github.com/yuin/goldmark/text"
)

type codeSpanParser struct {
}

var defaultCodeSpanParser = &codeSpanParser{}

// NewCodeSpanParser returns a new InlineParser that parses inline code
// surrounded by '`'.
func NewCodeSpanParser() InlineParser {
	return defaultCodeSpanParser
}

func (s *codeSpanParser) Trigger() []byte {
	return []byte{'`'}
}

func (s *codeSpanParser) Parse(parent ast.Node, block text.Reader, pc Context) ast.Node {
	line, startSegment := block.PeekLine()
	opener := 0
	for ; opener < len(line) && line[opener] == '`'; opener++ {
	}
	block.Advance(opener)
	l, pos := block.Position()
	node := ast.NewCodeSpan()
	for {
		line, segment := block.PeekLine()
		if line == nil {
			block.SetPosition(l, pos)
			return ast.NewTextSegment(startSegment.WithStop(startSegment.Start + opener))
		}
		for i := 0; i < len(line); i++ {
			c := line[i]
			if c == '`' {
				oldi := i
				for ; i < len(line) && line[i] == '`'; i++ {
				}
				closure := i - oldi
				if closure == opener && (i >= len(line) || line[i] != '`') {
					segment = segment.WithStop(segment.Start + i - closure)
					if !segment.IsEmpty() {
						node.AppendChild(node, ast.NewRawTextSegment(segment))
					}
					block.Advance(i)
					goto end
				}
			}
		}
		node.AppendChild(node, ast.NewRawTextSegment(segment))
		block.AdvanceLine()
	}
end:
	if !node.IsBlank(block.Source()) {
		// trim first halfspace and last halfspace
		segment := node.FirstChild().(*ast.Text).Segment
		shouldTrimmed := true
		if !(!segment.IsEmpty() && isSpaceOrNewline(block.Source()[segment.Start])) {
			shouldTrimmed = false
		}
		segment = node.LastChild().(*ast.Text).Segment
		if !(!segment.IsEmpty() && isSpaceOrNewline(block.Source()[segment.Stop-1])) {
			shouldTrimmed = false
		}
		if shouldTrimmed {
			t := node.FirstChild().(*ast.Text)
			segment := t.Segment
			t.Segment = segment.WithStart(segment.Start + 1)
			t = node.LastChild().(*ast.Text)
			segment = node.LastChild().(*ast.Text).Segment
			t.Segment = segment.WithStop(segment.Stop - 1)
		}
	}
	return node
}

func isSpaceOrNewline(c byte) bool {
	return c == ' ' || c == '\n'
}
@ -0,0 +1,238 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/text"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
// A DelimiterProcessor interface provides a set of functions about
|
||||
// Delimiter nodes.
|
||||
type DelimiterProcessor interface {
|
||||
// IsDelimiter returns true if given character is a delimiter, otherwise false.
|
||||
IsDelimiter(byte) bool
|
||||
|
||||
// CanOpenCloser returns true if given opener can close given closer, otherwise false.
|
||||
CanOpenCloser(opener, closer *Delimiter) bool
|
||||
|
||||
// OnMatch will be called when new matched delimiter found.
|
||||
// OnMatch should return a new Node correspond to the matched delimiter.
|
||||
OnMatch(consumes int) ast.Node
|
||||
}
|
||||
|
||||
// A Delimiter struct represents a delimiter like '*' of the Markdown text.
|
||||
type Delimiter struct {
|
||||
ast.BaseInline
|
||||
|
||||
Segment text.Segment
|
||||
|
||||
// CanOpen is set true if this delimiter can open a span for a new node.
|
||||
// See https://spec.commonmark.org/0.30/#can-open-emphasis for details.
|
||||
CanOpen bool
|
||||
|
||||
// CanClose is set true if this delimiter can close a span for a new node.
|
||||
// See https://spec.commonmark.org/0.30/#can-open-emphasis for details.
|
||||
CanClose bool
|
||||
|
||||
// Length is a remaining length of this delimiter.
|
||||
Length int
|
||||
|
||||
// OriginalLength is a original length of this delimiter.
|
||||
OriginalLength int
|
||||
|
||||
// Char is a character of this delimiter.
|
||||
Char byte
|
||||
|
||||
// PreviousDelimiter is a previous sibling delimiter node of this delimiter.
|
||||
PreviousDelimiter *Delimiter
|
||||
|
||||
// NextDelimiter is a next sibling delimiter node of this delimiter.
|
||||
NextDelimiter *Delimiter
|
||||
|
||||
// Processor is a DelimiterProcessor associated with this delimiter.
|
||||
Processor DelimiterProcessor
|
||||
}
|
||||
|
||||
// Inline implements Inline.Inline.
|
||||
func (d *Delimiter) Inline() {}
|
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (d *Delimiter) Dump(source []byte, level int) {
|
||||
fmt.Printf("%sDelimiter: \"%s\"\n", strings.Repeat(" ", level), string(d.Text(source)))
|
||||
}
|
||||
|
||||
var kindDelimiter = ast.NewNodeKind("Delimiter")
|
||||
|
||||
// Kind implements Node.Kind
|
||||
func (d *Delimiter) Kind() ast.NodeKind {
|
||||
return kindDelimiter
|
||||
}
|
||||
|
||||
// Text implements Node.Text
|
||||
func (d *Delimiter) Text(source []byte) []byte {
|
||||
return d.Segment.Value(source)
|
||||
}
|
||||
|
||||
// ConsumeCharacters consumes delimiters.
|
||||
func (d *Delimiter) ConsumeCharacters(n int) {
|
||||
d.Length -= n
|
||||
d.Segment = d.Segment.WithStop(d.Segment.Start + d.Length)
|
||||
}
|
||||
|
||||
// CalcComsumption calculates how many characters should be used for opening
|
||||
// a new span correspond to given closer.
|
||||
func (d *Delimiter) CalcComsumption(closer *Delimiter) int {
|
||||
if (d.CanClose || closer.CanOpen) && (d.OriginalLength+closer.OriginalLength)%3 == 0 && closer.OriginalLength%3 != 0 {
|
||||
return 0
|
||||
}
|
||||
if d.Length >= 2 && closer.Length >= 2 {
|
||||
return 2
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
// NewDelimiter returns a new Delimiter node.
|
||||
func NewDelimiter(canOpen, canClose bool, length int, char byte, processor DelimiterProcessor) *Delimiter {
|
||||
c := &Delimiter{
|
||||
BaseInline: ast.BaseInline{},
|
||||
CanOpen: canOpen,
|
||||
CanClose: canClose,
|
||||
Length: length,
|
||||
OriginalLength: length,
|
||||
Char: char,
|
||||
PreviousDelimiter: nil,
|
||||
NextDelimiter: nil,
|
||||
Processor: processor,
|
||||
}
|
||||
return c
|
||||
}
|
||||
|
||||
// ScanDelimiter scans a delimiter by given DelimiterProcessor.
|
||||
func ScanDelimiter(line []byte, before rune, min int, processor DelimiterProcessor) *Delimiter {
|
||||
i := 0
|
||||
c := line[i]
|
||||
j := i
|
||||
if !processor.IsDelimiter(c) {
|
||||
return nil
|
||||
}
|
||||
for ; j < len(line) && c == line[j]; j++ {
|
||||
}
|
||||
if (j - i) >= min {
|
||||
after := rune(' ')
|
||||
if j != len(line) {
|
||||
after = util.ToRune(line, j)
|
||||
}
|
||||
|
||||
canOpen, canClose := false, false
|
||||
beforeIsPunctuation := util.IsPunctRune(before)
|
||||
beforeIsWhitespace := util.IsSpaceRune(before)
|
||||
afterIsPunctuation := util.IsPunctRune(after)
|
||||
afterIsWhitespace := util.IsSpaceRune(after)
|
||||
|
||||
isLeft := !afterIsWhitespace &&
|
||||
(!afterIsPunctuation || beforeIsWhitespace || beforeIsPunctuation)
|
||||
isRight := !beforeIsWhitespace &&
|
||||
(!beforeIsPunctuation || afterIsWhitespace || afterIsPunctuation)
|
||||
|
||||
if line[i] == '_' {
|
||||
canOpen = isLeft && (!isRight || beforeIsPunctuation)
|
||||
canClose = isRight && (!isLeft || afterIsPunctuation)
|
||||
} else {
|
||||
canOpen = isLeft
|
||||
canClose = isRight
|
||||
}
|
||||
return NewDelimiter(canOpen, canClose, j-i, c, processor)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ProcessDelimiters processes the delimiter list in the context.
|
||||
// Processing will be stop when reaching the bottom.
|
||||
//
|
||||
// If you implement an inline parser that can have other inline nodes as
|
||||
// children, you should call this function when nesting span has closed.
|
||||
func ProcessDelimiters(bottom ast.Node, pc Context) {
|
||||
lastDelimiter := pc.LastDelimiter()
|
||||
if lastDelimiter == nil {
|
||||
return
|
||||
}
|
||||
var closer *Delimiter
|
||||
if bottom != nil {
|
||||
if bottom != lastDelimiter {
|
||||
for c := lastDelimiter.PreviousSibling(); c != nil && c != bottom; {
|
||||
if d, ok := c.(*Delimiter); ok {
|
||||
closer = d
|
||||
}
|
||||
c = c.PreviousSibling()
|
||||
}
|
||||
}
|
||||
} else {
|
||||
closer = pc.FirstDelimiter()
|
||||
}
|
||||
if closer == nil {
|
||||
pc.ClearDelimiters(bottom)
|
||||
return
|
||||
}
|
||||
for closer != nil {
|
||||
if !closer.CanClose {
|
||||
closer = closer.NextDelimiter
|
||||
continue
|
||||
}
|
||||
consume := 0
|
||||
found := false
|
||||
maybeOpener := false
|
||||
var opener *Delimiter
|
||||
for opener = closer.PreviousDelimiter; opener != nil && opener != bottom; opener = opener.PreviousDelimiter {
|
||||
if opener.CanOpen && opener.Processor.CanOpenCloser(opener, closer) {
|
||||
maybeOpener = true
|
||||
consume = opener.CalcComsumption(closer)
|
||||
if consume > 0 {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
next := closer.NextDelimiter
|
||||
if !maybeOpener && !closer.CanOpen {
|
||||
pc.RemoveDelimiter(closer)
|
||||
}
|
||||
closer = next
|
||||
continue
|
||||
}
|
||||
opener.ConsumeCharacters(consume)
|
||||
closer.ConsumeCharacters(consume)
|
||||
|
||||
node := opener.Processor.OnMatch(consume)
|
||||
|
||||
parent := opener.Parent()
|
||||
child := opener.NextSibling()
|
||||
|
||||
for child != nil && child != closer {
|
||||
next := child.NextSibling()
|
||||
node.AppendChild(node, child)
|
||||
child = next
|
||||
}
|
||||
parent.InsertAfter(parent, opener, node)
|
||||
|
||||
for c := opener.NextDelimiter; c != nil && c != closer; {
|
||||
next := c.NextDelimiter
|
||||
pc.RemoveDelimiter(c)
|
||||
c = next
|
||||
}
|
||||
|
||||
if opener.Length == 0 {
|
||||
pc.RemoveDelimiter(opener)
|
||||
}
|
||||
|
||||
if closer.Length == 0 {
|
||||
next := closer.NextDelimiter
|
||||
pc.RemoveDelimiter(closer)
|
||||
closer = next
|
||||
}
|
||||
}
|
||||
pc.ClearDelimiters(bottom)
|
||||
}
|
|
@ -0,0 +1,50 @@
package parser

import (
	"github.com/yuin/goldmark/ast"
	"github.com/yuin/goldmark/text"
)

type emphasisDelimiterProcessor struct {
}

func (p *emphasisDelimiterProcessor) IsDelimiter(b byte) bool {
	return b == '*' || b == '_'
}

func (p *emphasisDelimiterProcessor) CanOpenCloser(opener, closer *Delimiter) bool {
	return opener.Char == closer.Char
}

func (p *emphasisDelimiterProcessor) OnMatch(consumes int) ast.Node {
	return ast.NewEmphasis(consumes)
}

var defaultEmphasisDelimiterProcessor = &emphasisDelimiterProcessor{}

type emphasisParser struct {
}

var defaultEmphasisParser = &emphasisParser{}

// NewEmphasisParser return a new InlineParser that parses emphasises.
func NewEmphasisParser() InlineParser {
	return defaultEmphasisParser
}

func (s *emphasisParser) Trigger() []byte {
	return []byte{'*', '_'}
}

func (s *emphasisParser) Parse(parent ast.Node, block text.Reader, pc Context) ast.Node {
	before := block.PrecendingCharacter()
	line, segment := block.PeekLine()
	node := ScanDelimiter(line, before, 1, defaultEmphasisDelimiterProcessor)
	if node == nil {
		return nil
	}
	node.Segment = segment.WithStop(segment.Start + node.OriginalLength)
	block.Advance(node.OriginalLength)
	pc.PushDelimiter(node)
	return node
}
@ -0,0 +1,121 @@
package parser

import (
	"bytes"

	"github.com/yuin/goldmark/ast"
	"github.com/yuin/goldmark/text"
	"github.com/yuin/goldmark/util"
)

type fencedCodeBlockParser struct {
}

var defaultFencedCodeBlockParser = &fencedCodeBlockParser{}

// NewFencedCodeBlockParser returns a new BlockParser that
// parses fenced code blocks.
func NewFencedCodeBlockParser() BlockParser {
	return defaultFencedCodeBlockParser
}

type fenceData struct {
	char   byte
	indent int
	length int
	node   ast.Node
}

var fencedCodeBlockInfoKey = NewContextKey()

func (b *fencedCodeBlockParser) Trigger() []byte {
	return []byte{'~', '`'}
}

func (b *fencedCodeBlockParser) Open(parent ast.Node, reader text.Reader, pc Context) (ast.Node, State) {
	line, segment := reader.PeekLine()
	pos := pc.BlockOffset()
	if pos < 0 || (line[pos] != '`' && line[pos] != '~') {
		return nil, NoChildren
	}
	findent := pos
	fenceChar := line[pos]
	i := pos
	for ; i < len(line) && line[i] == fenceChar; i++ {
	}
	oFenceLength := i - pos
	if oFenceLength < 3 {
		return nil, NoChildren
	}
	var info *ast.Text
	if i < len(line)-1 {
		rest := line[i:]
		left := util.TrimLeftSpaceLength(rest)
		right := util.TrimRightSpaceLength(rest)
		if left < len(rest)-right {
			infoStart, infoStop := segment.Start-segment.Padding+i+left, segment.Stop-right
			value := rest[left : len(rest)-right]
			if fenceChar == '`' && bytes.IndexByte(value, '`') > -1 {
				return nil, NoChildren
			} else if infoStart != infoStop {
				info = ast.NewTextSegment(text.NewSegment(infoStart, infoStop))
			}
		}
	}
	node := ast.NewFencedCodeBlock(info)
	pc.Set(fencedCodeBlockInfoKey, &fenceData{fenceChar, findent, oFenceLength, node})
	return node, NoChildren

}

func (b *fencedCodeBlockParser) Continue(node ast.Node, reader text.Reader, pc Context) State {
	line, segment := reader.PeekLine()
	fdata := pc.Get(fencedCodeBlockInfoKey).(*fenceData)

	w, pos := util.IndentWidth(line, reader.LineOffset())
	if w < 4 {
		i := pos
		for ; i < len(line) && line[i] == fdata.char; i++ {
		}
		length := i - pos
		if length >= fdata.length && util.IsBlank(line[i:]) {
			newline := 1
			if line[len(line)-1] != '\n' {
				newline = 0
			}
			reader.Advance(segment.Stop - segment.Start - newline - segment.Padding)
			return Close
		}
	}
	pos, padding := util.IndentPositionPadding(line, reader.LineOffset(), segment.Padding, fdata.indent)
	if pos < 0 {
		pos = util.FirstNonSpacePosition(line)
		if pos < 0 {
			pos = 0
		}
		padding = 0
	}
	seg := text.NewSegmentPadding(segment.Start+pos, segment.Stop, padding)
	// if code block line starts with a tab, keep a tab as it is.
	if padding != 0 {
		preserveLeadingTabInCodeBlock(&seg, reader, fdata.indent)
	}
	node.Lines().Append(seg)
	reader.AdvanceAndSetPadding(segment.Stop-segment.Start-pos-1, padding)
	return Continue | NoChildren
}

func (b *fencedCodeBlockParser) Close(node ast.Node, reader text.Reader, pc Context) {
	fdata := pc.Get(fencedCodeBlockInfoKey).(*fenceData)
	if fdata.node == node {
		pc.Set(fencedCodeBlockInfoKey, nil)
	}
}

func (b *fencedCodeBlockParser) CanInterruptParagraph() bool {
	return true
}

func (b *fencedCodeBlockParser) CanAcceptIndentedLine() bool {
	return false
}
@ -0,0 +1,228 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/text"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
var allowedBlockTags = map[string]bool{
|
||||
"address": true,
|
||||
"article": true,
|
||||
"aside": true,
|
||||
"base": true,
|
||||
"basefont": true,
|
||||
"blockquote": true,
|
||||
"body": true,
|
||||
"caption": true,
|
||||
"center": true,
|
||||
"col": true,
|
||||
"colgroup": true,
|
||||
"dd": true,
|
||||
"details": true,
|
||||
"dialog": true,
|
||||
"dir": true,
|
||||
"div": true,
|
||||
"dl": true,
|
||||
"dt": true,
|
||||
"fieldset": true,
|
||||
"figcaption": true,
|
||||
"figure": true,
|
||||
"footer": true,
|
||||
"form": true,
|
||||
"frame": true,
|
||||
"frameset": true,
|
||||
"h1": true,
|
||||
"h2": true,
|
||||
"h3": true,
|
||||
"h4": true,
|
||||
"h5": true,
|
||||
"h6": true,
|
||||
"head": true,
|
||||
"header": true,
|
||||
"hr": true,
|
||||
"html": true,
|
||||
"iframe": true,
|
||||
"legend": true,
|
||||
"li": true,
|
||||
"link": true,
|
||||
"main": true,
|
||||
"menu": true,
|
||||
"menuitem": true,
|
||||
"meta": true,
|
||||
"nav": true,
|
||||
"noframes": true,
|
||||
"ol": true,
|
||||
"optgroup": true,
|
||||
"option": true,
|
||||
"p": true,
|
||||
"param": true,
|
||||
"section": true,
|
||||
"source": true,
|
||||
"summary": true,
|
||||
"table": true,
|
||||
"tbody": true,
|
||||
"td": true,
|
||||
"tfoot": true,
|
||||
"th": true,
|
||||
"thead": true,
|
||||
"title": true,
|
||||
"tr": true,
|
||||
"track": true,
|
||||
"ul": true,
|
||||
}
|
||||
|
||||
var htmlBlockType1OpenRegexp = regexp.MustCompile(`(?i)^[ ]{0,3}<(script|pre|style|textarea)(?:\s.*|>.*|/>.*|)(?:\r\n|\n)?$`)
|
||||
var htmlBlockType1CloseRegexp = regexp.MustCompile(`(?i)^.*</(?:script|pre|style|textarea)>.*`)
|
||||
|
||||
var htmlBlockType2OpenRegexp = regexp.MustCompile(`^[ ]{0,3}<!\-\-`)
|
||||
var htmlBlockType2Close = []byte{'-', '-', '>'}
|
||||
|
||||
var htmlBlockType3OpenRegexp = regexp.MustCompile(`^[ ]{0,3}<\?`)
|
||||
var htmlBlockType3Close = []byte{'?', '>'}
|
||||
|
||||
var htmlBlockType4OpenRegexp = regexp.MustCompile(`^[ ]{0,3}<![A-Z]+.*(?:\r\n|\n)?$`)
|
||||
var htmlBlockType4Close = []byte{'>'}
|
||||
|
||||
var htmlBlockType5OpenRegexp = regexp.MustCompile(`^[ ]{0,3}<\!\[CDATA\[`)
|
||||
var htmlBlockType5Close = []byte{']', ']', '>'}
|
||||
|
||||
var htmlBlockType6Regexp = regexp.MustCompile(`^[ ]{0,3}<(?:/[ ]*)?([a-zA-Z]+[a-zA-Z0-9\-]*)(?:[ ].*|>.*|/>.*|)(?:\r\n|\n)?$`)
|
||||
|
||||
var htmlBlockType7Regexp = regexp.MustCompile(`^[ ]{0,3}<(/[ ]*)?([a-zA-Z]+[a-zA-Z0-9\-]*)(` + attributePattern + `*)[ ]*(?:>|/>)[ ]*(?:\r\n|\n)?$`)
|
||||
|
||||
type htmlBlockParser struct {
|
||||
}
|
||||
|
||||
var defaultHTMLBlockParser = &htmlBlockParser{}
|
||||
|
||||
// NewHTMLBlockParser return a new BlockParser that can parse html
|
||||
// blocks.
|
||||
func NewHTMLBlockParser() BlockParser {
|
||||
return defaultHTMLBlockParser
|
||||
}
|
||||
|
||||
func (b *htmlBlockParser) Trigger() []byte {
|
||||
return []byte{'<'}
|
||||
}
|
||||
|
||||
func (b *htmlBlockParser) Open(parent ast.Node, reader text.Reader, pc Context) (ast.Node, State) {
|
||||
var node *ast.HTMLBlock
|
||||
line, segment := reader.PeekLine()
|
||||
last := pc.LastOpenedBlock().Node
|
||||
if pos := pc.BlockOffset(); pos < 0 || line[pos] != '<' {
|
||||
return nil, NoChildren
|
||||
}
|
||||
|
||||
if m := htmlBlockType1OpenRegexp.FindSubmatchIndex(line); m != nil {
|
||||
node = ast.NewHTMLBlock(ast.HTMLBlockType1)
|
||||
} else if htmlBlockType2OpenRegexp.Match(line) {
|
||||
node = ast.NewHTMLBlock(ast.HTMLBlockType2)
|
||||
} else if htmlBlockType3OpenRegexp.Match(line) {
|
||||
node = ast.NewHTMLBlock(ast.HTMLBlockType3)
|
||||
} else if htmlBlockType4OpenRegexp.Match(line) {
|
||||
node = ast.NewHTMLBlock(ast.HTMLBlockType4)
|
||||
} else if htmlBlockType5OpenRegexp.Match(line) {
|
||||
node = ast.NewHTMLBlock(ast.HTMLBlockType5)
|
||||
} else if match := htmlBlockType7Regexp.FindSubmatchIndex(line); match != nil {
|
||||
isCloseTag := match[2] > -1 && bytes.Equal(line[match[2]:match[3]], []byte("/"))
|
||||
hasAttr := match[6] != match[7]
|
||||
tagName := strings.ToLower(string(line[match[4]:match[5]]))
|
||||
_, ok := allowedBlockTags[tagName]
|
||||
if ok {
|
||||
node = ast.NewHTMLBlock(ast.HTMLBlockType6)
|
||||
} else if tagName != "script" && tagName != "style" && tagName != "pre" && !ast.IsParagraph(last) && !(isCloseTag && hasAttr) { // type 7 can not interrupt paragraph
|
||||
node = ast.NewHTMLBlock(ast.HTMLBlockType7)
|
||||
}
|
||||
}
|
||||
if node == nil {
|
||||
if match := htmlBlockType6Regexp.FindSubmatchIndex(line); match != nil {
|
||||
tagName := string(line[match[2]:match[3]])
|
||||
_, ok := allowedBlockTags[strings.ToLower(tagName)]
|
||||
if ok {
|
||||
node = ast.NewHTMLBlock(ast.HTMLBlockType6)
|
||||
}
|
||||
}
|
||||
}
|
||||
if node != nil {
|
||||
reader.Advance(segment.Len() - 1)
|
||||
node.Lines().Append(segment)
|
||||
return node, NoChildren
|
||||
}
|
||||
return nil, NoChildren
|
||||
}
|
||||
|
||||
func (b *htmlBlockParser) Continue(node ast.Node, reader text.Reader, pc Context) State {
|
||||
htmlBlock := node.(*ast.HTMLBlock)
|
||||
lines := htmlBlock.Lines()
|
||||
line, segment := reader.PeekLine()
|
||||
var closurePattern []byte
|
||||
|
||||
switch htmlBlock.HTMLBlockType {
|
||||
case ast.HTMLBlockType1:
|
||||
if lines.Len() == 1 {
|
||||
firstLine := lines.At(0)
|
||||
if htmlBlockType1CloseRegexp.Match(firstLine.Value(reader.Source())) {
|
||||
return Close
|
||||
}
|
||||
}
|
||||
if htmlBlockType1CloseRegexp.Match(line) {
|
||||
htmlBlock.ClosureLine = segment
|
||||
reader.Advance(segment.Len() - 1)
|
||||
return Close
|
||||
}
|
||||
case ast.HTMLBlockType2:
|
||||
closurePattern = htmlBlockType2Close
|
||||
fallthrough
|
||||
case ast.HTMLBlockType3:
|
||||
if closurePattern == nil {
|
||||
closurePattern = htmlBlockType3Close
|
||||
}
|
||||
fallthrough
|
||||
case ast.HTMLBlockType4:
|
||||
if closurePattern == nil {
|
||||
closurePattern = htmlBlockType4Close
|
||||
}
|
||||
fallthrough
|
||||
case ast.HTMLBlockType5:
|
||||
if closurePattern == nil {
|
||||
closurePattern = htmlBlockType5Close
|
||||
}
|
||||
|
||||
if lines.Len() == 1 {
|
||||
firstLine := lines.At(0)
|
||||
if bytes.Contains(firstLine.Value(reader.Source()), closurePattern) {
|
||||
return Close
|
||||
}
|
||||
}
|
||||
if bytes.Contains(line, closurePattern) {
|
||||
htmlBlock.ClosureLine = segment
|
||||
reader.Advance(segment.Len())
|
||||
return Close
|
||||
}
|
||||
|
||||
case ast.HTMLBlockType6, ast.HTMLBlockType7:
|
||||
if util.IsBlank(line) {
|
||||
return Close
|
||||
}
|
||||
}
|
||||
node.Lines().Append(segment)
|
||||
reader.Advance(segment.Len() - 1)
|
||||
return Continue | NoChildren
|
||||
}
|
||||
|
||||
func (b *htmlBlockParser) Close(node ast.Node, reader text.Reader, pc Context) {
|
||||
// nothing to do
|
||||
}
|
||||
|
||||
func (b *htmlBlockParser) CanInterruptParagraph() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (b *htmlBlockParser) CanAcceptIndentedLine() bool {
|
||||
return false
|
||||
}
|
|
@ -0,0 +1,409 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/text"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
var linkLabelStateKey = NewContextKey()
|
||||
|
||||
type linkLabelState struct {
|
||||
ast.BaseInline
|
||||
|
||||
Segment text.Segment
|
||||
|
||||
IsImage bool
|
||||
|
||||
Prev *linkLabelState
|
||||
|
||||
Next *linkLabelState
|
||||
|
||||
First *linkLabelState
|
||||
|
||||
Last *linkLabelState
|
||||
}
|
||||
|
||||
func newLinkLabelState(segment text.Segment, isImage bool) *linkLabelState {
|
||||
return &linkLabelState{
|
||||
Segment: segment,
|
||||
IsImage: isImage,
|
||||
}
|
||||
}
|
||||
|
||||
func (s *linkLabelState) Text(source []byte) []byte {
|
||||
return s.Segment.Value(source)
|
||||
}
|
||||
|
||||
func (s *linkLabelState) Dump(source []byte, level int) {
|
||||
fmt.Printf("%slinkLabelState: \"%s\"\n", strings.Repeat(" ", level), s.Text(source))
|
||||
}
|
||||
|
||||
var kindLinkLabelState = ast.NewNodeKind("LinkLabelState")
|
||||
|
||||
func (s *linkLabelState) Kind() ast.NodeKind {
|
||||
return kindLinkLabelState
|
||||
}
|
||||
|
||||
func linkLabelStateLength(v *linkLabelState) int {
|
||||
if v == nil || v.Last == nil || v.First == nil {
|
||||
return 0
|
||||
}
|
||||
return v.Last.Segment.Stop - v.First.Segment.Start
|
||||
}
|
||||
|
||||
func pushLinkLabelState(pc Context, v *linkLabelState) {
|
||||
tlist := pc.Get(linkLabelStateKey)
|
||||
var list *linkLabelState
|
||||
if tlist == nil {
|
||||
list = v
|
||||
v.First = v
|
||||
v.Last = v
|
||||
pc.Set(linkLabelStateKey, list)
|
||||
} else {
|
||||
list = tlist.(*linkLabelState)
|
||||
l := list.Last
|
||||
list.Last = v
|
||||
l.Next = v
|
||||
v.Prev = l
|
||||
}
|
||||
}
|
||||
|
||||
func removeLinkLabelState(pc Context, d *linkLabelState) {
|
||||
tlist := pc.Get(linkLabelStateKey)
|
||||
var list *linkLabelState
|
||||
if tlist == nil {
|
||||
return
|
||||
}
|
||||
list = tlist.(*linkLabelState)
|
||||
|
||||
if d.Prev == nil {
|
||||
list = d.Next
|
||||
if list != nil {
|
||||
list.First = d
|
||||
list.Last = d.Last
|
||||
list.Prev = nil
|
||||
pc.Set(linkLabelStateKey, list)
|
||||
} else {
|
||||
pc.Set(linkLabelStateKey, nil)
|
||||
}
|
||||
} else {
|
||||
d.Prev.Next = d.Next
|
||||
if d.Next != nil {
|
||||
d.Next.Prev = d.Prev
|
||||
}
|
||||
}
|
||||
if list != nil && d.Next == nil {
|
||||
list.Last = d.Prev
|
||||
}
|
||||
d.Next = nil
|
||||
d.Prev = nil
|
||||
d.First = nil
|
||||
d.Last = nil
|
||||
}
|
||||
|
||||
type linkParser struct {
|
||||
}
|
||||
|
||||
var defaultLinkParser = &linkParser{}
|
||||
|
||||
// NewLinkParser return a new InlineParser that parses links.
|
||||
func NewLinkParser() InlineParser {
|
||||
return defaultLinkParser
|
||||
}
|
||||
|
||||
func (s *linkParser) Trigger() []byte {
|
||||
return []byte{'!', '[', ']'}
|
||||
}
|
||||
|
||||
var linkBottom = NewContextKey()
|
||||
|
||||
func (s *linkParser) Parse(parent ast.Node, block text.Reader, pc Context) ast.Node {
|
||||
line, segment := block.PeekLine()
|
||||
if line[0] == '!' {
|
||||
if len(line) > 1 && line[1] == '[' {
|
||||
block.Advance(1)
|
||||
pc.Set(linkBottom, pc.LastDelimiter())
|
||||
return processLinkLabelOpen(block, segment.Start+1, true, pc)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
if line[0] == '[' {
|
||||
pc.Set(linkBottom, pc.LastDelimiter())
|
||||
return processLinkLabelOpen(block, segment.Start, false, pc)
|
||||
}
|
||||
|
||||
// line[0] == ']'
|
||||
tlist := pc.Get(linkLabelStateKey)
|
||||
if tlist == nil {
|
||||
return nil
|
||||
}
|
||||
last := tlist.(*linkLabelState).Last
|
||||
if last == nil {
|
||||
return nil
|
||||
}
|
||||
block.Advance(1)
|
||||
removeLinkLabelState(pc, last)
|
||||
// CommonMark spec says:
|
||||
// > A link label can have at most 999 characters inside the square brackets.
|
||||
if linkLabelStateLength(tlist.(*linkLabelState)) > 998 {
|
||||
ast.MergeOrReplaceTextSegment(last.Parent(), last, last.Segment)
|
||||
return nil
|
||||
}
|
||||
|
||||
if !last.IsImage && s.containsLink(last) { // a link in a link text is not allowed
|
||||
ast.MergeOrReplaceTextSegment(last.Parent(), last, last.Segment)
|
||||
return nil
|
||||
}
|
||||
|
||||
c := block.Peek()
|
||||
l, pos := block.Position()
|
||||
var link *ast.Link
|
||||
var hasValue bool
|
||||
if c == '(' { // normal link
|
||||
link = s.parseLink(parent, last, block, pc)
|
||||
} else if c == '[' { // reference link
|
||||
link, hasValue = s.parseReferenceLink(parent, last, block, pc)
|
||||
if link == nil && hasValue {
|
||||
ast.MergeOrReplaceTextSegment(last.Parent(), last, last.Segment)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
if link == nil {
|
||||
// maybe shortcut reference link
|
||||
block.SetPosition(l, pos)
|
||||
ssegment := text.NewSegment(last.Segment.Stop, segment.Start)
|
||||
maybeReference := block.Value(ssegment)
|
||||
// CommonMark spec says:
|
||||
// > A link label can have at most 999 characters inside the square brackets.
|
||||
if len(maybeReference) > 999 {
|
||||
ast.MergeOrReplaceTextSegment(last.Parent(), last, last.Segment)
|
||||
return nil
|
||||
}
|
||||
|
||||
ref, ok := pc.Reference(util.ToLinkReference(maybeReference))
|
||||
if !ok {
|
||||
ast.MergeOrReplaceTextSegment(last.Parent(), last, last.Segment)
|
||||
return nil
|
||||
}
|
||||
link = ast.NewLink()
|
||||
s.processLinkLabel(parent, link, last, pc)
|
||||
link.Title = ref.Title()
|
||||
link.Destination = ref.Destination()
|
||||
}
|
||||
if last.IsImage {
|
||||
last.Parent().RemoveChild(last.Parent(), last)
|
||||
return ast.NewImage(link)
|
||||
}
|
||||
last.Parent().RemoveChild(last.Parent(), last)
|
||||
return link
|
||||
}
|
||||
|
||||
func (s *linkParser) containsLink(n ast.Node) bool {
|
||||
if n == nil {
|
||||
return false
|
||||
}
|
||||
for c := n; c != nil; c = c.NextSibling() {
|
||||
if _, ok := c.(*ast.Link); ok {
|
||||
return true
|
||||
}
|
||||
if s.containsLink(c.FirstChild()) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func processLinkLabelOpen(block text.Reader, pos int, isImage bool, pc Context) *linkLabelState {
|
||||
start := pos
|
||||
if isImage {
|
||||
start--
|
||||
}
|
||||
state := newLinkLabelState(text.NewSegment(start, pos+1), isImage)
|
||||
pushLinkLabelState(pc, state)
|
||||
block.Advance(1)
|
||||
return state
|
||||
}
|
||||
|
||||
func (s *linkParser) processLinkLabel(parent ast.Node, link *ast.Link, last *linkLabelState, pc Context) {
|
||||
var bottom ast.Node
|
||||
if v := pc.Get(linkBottom); v != nil {
|
||||
bottom = v.(ast.Node)
|
||||
}
|
||||
pc.Set(linkBottom, nil)
|
||||
ProcessDelimiters(bottom, pc)
|
||||
for c := last.NextSibling(); c != nil; {
|
||||
next := c.NextSibling()
|
||||
parent.RemoveChild(parent, c)
|
||||
link.AppendChild(link, c)
|
||||
c = next
|
||||
}
|
||||
}
|
||||
|
||||
var linkFindClosureOptions text.FindClosureOptions = text.FindClosureOptions{
|
||||
Nesting: false,
|
||||
Newline: true,
|
||||
Advance: true,
|
||||
}
|
||||
|
||||
func (s *linkParser) parseReferenceLink(parent ast.Node, last *linkLabelState, block text.Reader, pc Context) (*ast.Link, bool) {
|
||||
_, orgpos := block.Position()
|
||||
block.Advance(1) // skip '['
|
||||
segments, found := block.FindClosure('[', ']', linkFindClosureOptions)
|
||||
if !found {
|
||||
return nil, false
|
||||
}
|
||||
|
||||
var maybeReference []byte
|
||||
if segments.Len() == 1 { // avoid allocate a new byte slice
|
||||
maybeReference = block.Value(segments.At(0))
|
||||
} else {
|
||||
maybeReference = []byte{}
|
||||
for i := 0; i < segments.Len(); i++ {
|
||||
s := segments.At(i)
|
||||
maybeReference = append(maybeReference, block.Value(s)...)
|
||||
}
|
||||
}
|
||||
if util.IsBlank(maybeReference) { // collapsed reference link
|
||||
s := text.NewSegment(last.Segment.Stop, orgpos.Start-1)
|
||||
maybeReference = block.Value(s)
|
||||
}
|
||||
// CommonMark spec says:
|
||||
// > A link label can have at most 999 characters inside the square brackets.
|
||||
if len(maybeReference) > 999 {
|
||||
return nil, true
|
||||
}
|
||||
|
||||
ref, ok := pc.Reference(util.ToLinkReference(maybeReference))
|
||||
if !ok {
|
||||
return nil, true
|
||||
}
|
||||
|
||||
link := ast.NewLink()
|
||||
s.processLinkLabel(parent, link, last, pc)
|
||||
link.Title = ref.Title()
|
||||
link.Destination = ref.Destination()
|
||||
return link, true
|
||||
}
|
||||
|
||||
func (s *linkParser) parseLink(parent ast.Node, last *linkLabelState, block text.Reader, pc Context) *ast.Link {
|
||||
block.Advance(1) // skip '('
|
||||
block.SkipSpaces()
|
||||
var title []byte
|
||||
var destination []byte
|
||||
var ok bool
|
||||
if block.Peek() == ')' { // empty link like '[link]()'
|
||||
block.Advance(1)
|
||||
} else {
|
||||
destination, ok = parseLinkDestination(block)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
block.SkipSpaces()
|
||||
if block.Peek() == ')' {
|
||||
block.Advance(1)
|
||||
} else {
|
||||
title, ok = parseLinkTitle(block)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
block.SkipSpaces()
|
||||
if block.Peek() == ')' {
|
||||
block.Advance(1)
|
||||
} else {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
link := ast.NewLink()
|
||||
s.processLinkLabel(parent, link, last, pc)
|
||||
link.Destination = destination
|
||||
link.Title = title
|
||||
return link
|
||||
}
|
||||
|
||||
func parseLinkDestination(block text.Reader) ([]byte, bool) {
|
||||
block.SkipSpaces()
|
||||
line, _ := block.PeekLine()
|
||||
if block.Peek() == '<' {
|
||||
i := 1
|
||||
for i < len(line) {
|
||||
c := line[i]
|
||||
if c == '\\' && i < len(line)-1 && util.IsPunct(line[i+1]) {
|
||||
i += 2
|
||||
continue
|
||||
} else if c == '>' {
|
||||
block.Advance(i + 1)
|
||||
return line[1:i], true
|
||||
}
|
||||
i++
|
||||
}
|
||||
return nil, false
|
||||
}
|
||||
opened := 0
|
||||
i := 0
|
||||
for i < len(line) {
|
||||
c := line[i]
|
||||
if c == '\\' && i < len(line)-1 && util.IsPunct(line[i+1]) {
|
||||
i += 2
|
||||
continue
|
||||
} else if c == '(' {
|
||||
opened++
|
||||
} else if c == ')' {
|
||||
opened--
|
||||
if opened < 0 {
|
||||
break
|
||||
}
|
||||
} else if util.IsSpace(c) {
|
||||
break
|
||||
}
|
||||
i++
|
||||
}
|
||||
block.Advance(i)
|
||||
return line[:i], len(line[:i]) != 0
|
||||
}
|
||||
|
||||
func parseLinkTitle(block text.Reader) ([]byte, bool) {
|
||||
block.SkipSpaces()
|
||||
opener := block.Peek()
|
||||
if opener != '"' && opener != '\'' && opener != '(' {
|
||||
return nil, false
|
||||
}
|
||||
closer := opener
|
||||
if opener == '(' {
|
||||
closer = ')'
|
||||
}
|
||||
block.Advance(1)
|
||||
segments, found := block.FindClosure(opener, closer, linkFindClosureOptions)
|
||||
if found {
|
||||
if segments.Len() == 1 {
|
||||
return block.Value(segments.At(0)), true
|
||||
}
|
||||
var title []byte
|
||||
for i := 0; i < segments.Len(); i++ {
|
||||
s := segments.At(i)
|
||||
title = append(title, block.Value(s)...)
|
||||
}
|
||||
return title, true
|
||||
}
|
||||
return nil, false
|
||||
}
|
||||
|
||||
func (s *linkParser) CloseBlock(parent ast.Node, block text.Reader, pc Context) {
|
||||
pc.Set(linkBottom, nil)
|
||||
tlist := pc.Get(linkLabelStateKey)
|
||||
if tlist == nil {
|
||||
return
|
||||
}
|
||||
for s := tlist.(*linkLabelState); s != nil; {
|
||||
next := s.Next
|
||||
removeLinkLabelState(pc, s)
|
||||
s.Parent().ReplaceChild(s.Parent(), s, ast.NewTextSegment(s.Segment))
|
||||
s = next
|
||||
}
|
||||
}
|
|
@ -0,0 +1,152 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/text"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
type linkReferenceParagraphTransformer struct {
|
||||
}
|
||||
|
||||
// LinkReferenceParagraphTransformer is a ParagraphTransformer implementation
|
||||
// that parses and extracts link reference from paragraphs.
|
||||
var LinkReferenceParagraphTransformer = &linkReferenceParagraphTransformer{}
|
||||
|
||||
func (p *linkReferenceParagraphTransformer) Transform(node *ast.Paragraph, reader text.Reader, pc Context) {
|
||||
lines := node.Lines()
|
||||
block := text.NewBlockReader(reader.Source(), lines)
|
||||
removes := [][2]int{}
|
||||
for {
|
||||
start, end := parseLinkReferenceDefinition(block, pc)
|
||||
if start > -1 {
|
||||
if start == end {
|
||||
end++
|
||||
}
|
||||
removes = append(removes, [2]int{start, end})
|
||||
continue
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
offset := 0
|
||||
for _, remove := range removes {
|
||||
if lines.Len() == 0 {
|
||||
break
|
||||
}
|
||||
s := lines.Sliced(remove[1]-offset, lines.Len())
|
||||
lines.SetSliced(0, remove[0]-offset)
|
||||
lines.AppendAll(s)
|
||||
offset = remove[1]
|
||||
}
|
||||
|
||||
if lines.Len() == 0 {
|
||||
t := ast.NewTextBlock()
|
||||
t.SetBlankPreviousLines(node.HasBlankPreviousLines())
|
||||
node.Parent().ReplaceChild(node.Parent(), node, t)
|
||||
return
|
||||
}
|
||||
|
||||
node.SetLines(lines)
|
||||
}
|
||||
|
||||
func parseLinkReferenceDefinition(block text.Reader, pc Context) (int, int) {
|
||||
block.SkipSpaces()
|
||||
line, _ := block.PeekLine()
|
||||
if line == nil {
|
||||
return -1, -1
|
||||
}
|
||||
startLine, _ := block.Position()
|
||||
width, pos := util.IndentWidth(line, 0)
|
||||
if width > 3 {
|
||||
return -1, -1
|
||||
}
|
||||
if width != 0 {
|
||||
pos++
|
||||
}
|
||||
if line[pos] != '[' {
|
||||
return -1, -1
|
||||
}
|
||||
block.Advance(pos + 1)
|
||||
segments, found := block.FindClosure('[', ']', linkFindClosureOptions)
|
||||
if !found {
|
||||
return -1, -1
|
||||
}
|
||||
var label []byte
|
||||
if segments.Len() == 1 {
|
||||
label = block.Value(segments.At(0))
|
||||
} else {
|
||||
for i := 0; i < segments.Len(); i++ {
|
||||
s := segments.At(i)
|
||||
label = append(label, block.Value(s)...)
|
||||
}
|
||||
}
|
||||
if util.IsBlank(label) {
|
||||
return -1, -1
|
||||
}
|
||||
if block.Peek() != ':' {
|
||||
return -1, -1
|
||||
}
|
||||
block.Advance(1)
|
||||
block.SkipSpaces()
|
||||
destination, ok := parseLinkDestination(block)
|
||||
if !ok {
|
||||
return -1, -1
|
||||
}
|
||||
line, _ = block.PeekLine()
|
||||
isNewLine := line == nil || util.IsBlank(line)
|
||||
|
||||
endLine, _ := block.Position()
|
||||
_, spaces, _ := block.SkipSpaces()
|
||||
opener := block.Peek()
|
||||
if opener != '"' && opener != '\'' && opener != '(' {
|
||||
if !isNewLine {
|
||||
return -1, -1
|
||||
}
|
||||
ref := NewReference(label, destination, nil)
|
||||
pc.AddReference(ref)
|
||||
return startLine, endLine + 1
|
||||
}
|
||||
if spaces == 0 {
|
||||
return -1, -1
|
||||
}
|
||||
block.Advance(1)
|
||||
closer := opener
|
||||
if opener == '(' {
|
||||
closer = ')'
|
||||
}
|
||||
segments, found = block.FindClosure(opener, closer, linkFindClosureOptions)
|
||||
if !found {
|
||||
if !isNewLine {
|
||||
return -1, -1
|
||||
}
|
||||
ref := NewReference(label, destination, nil)
|
||||
pc.AddReference(ref)
|
||||
block.AdvanceLine()
|
||||
return startLine, endLine + 1
|
||||
}
|
||||
var title []byte
|
||||
if segments.Len() == 1 {
|
||||
title = block.Value(segments.At(0))
|
||||
} else {
|
||||
for i := 0; i < segments.Len(); i++ {
|
||||
s := segments.At(i)
|
||||
title = append(title, block.Value(s)...)
|
||||
}
|
||||
}
|
||||
|
||||
line, _ = block.PeekLine()
|
||||
if line != nil && !util.IsBlank(line) {
|
||||
if !isNewLine {
|
||||
return -1, -1
|
||||
}
|
||||
ref := NewReference(label, destination, title)
|
||||
pc.AddReference(ref)
|
||||
return startLine, endLine
|
||||
}
|
||||
|
||||
endLine, _ = block.Position()
|
||||
ref := NewReference(label, destination, title)
|
||||
pc.AddReference(ref)
|
||||
return startLine, endLine + 1
|
||||
}
|
|
@ -0,0 +1,283 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/text"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
type listItemType int
|
||||
|
||||
const (
|
||||
notList listItemType = iota
|
||||
bulletList
|
||||
orderedList
|
||||
)
|
||||
|
||||
var skipListParserKey = NewContextKey()
|
||||
var emptyListItemWithBlankLines = NewContextKey()
|
||||
var listItemFlagValue interface{} = true
|
||||
|
||||
// Same as
|
||||
// `^(([ ]*)([\-\*\+]))(\s+.*)?\n?$`.FindSubmatchIndex or
|
||||
// `^(([ ]*)(\d{1,9}[\.\)]))(\s+.*)?\n?$`.FindSubmatchIndex
|
||||
func parseListItem(line []byte) ([6]int, listItemType) {
|
||||
i := 0
|
||||
l := len(line)
|
||||
ret := [6]int{}
|
||||
for ; i < l && line[i] == ' '; i++ {
|
||||
c := line[i]
|
||||
if c == '\t' {
|
||||
return ret, notList
|
||||
}
|
||||
}
|
||||
if i > 3 {
|
||||
return ret, notList
|
||||
}
|
||||
ret[0] = 0
|
||||
ret[1] = i
|
||||
ret[2] = i
|
||||
var typ listItemType
|
||||
if i < l && (line[i] == '-' || line[i] == '*' || line[i] == '+') {
|
||||
i++
|
||||
ret[3] = i
|
||||
typ = bulletList
|
||||
} else if i < l {
|
||||
for ; i < l && util.IsNumeric(line[i]); i++ {
|
||||
}
|
||||
ret[3] = i
|
||||
if ret[3] == ret[2] || ret[3]-ret[2] > 9 {
|
||||
return ret, notList
|
||||
}
|
||||
if i < l && (line[i] == '.' || line[i] == ')') {
|
||||
i++
|
||||
ret[3] = i
|
||||
} else {
|
||||
return ret, notList
|
||||
}
|
||||
typ = orderedList
|
||||
} else {
|
||||
return ret, notList
|
||||
}
|
||||
if i < l && line[i] != '\n' {
|
||||
w, _ := util.IndentWidth(line[i:], 0)
|
||||
if w == 0 {
|
||||
return ret, notList
|
||||
}
|
||||
}
|
||||
if i >= l {
|
||||
ret[4] = -1
|
||||
ret[5] = -1
|
||||
return ret, typ
|
||||
}
|
||||
ret[4] = i
|
||||
ret[5] = len(line)
|
||||
if line[ret[5]-1] == '\n' && line[i] != '\n' {
|
||||
ret[5]--
|
||||
}
|
||||
return ret, typ
|
||||
}
|
||||
|
||||
func matchesListItem(source []byte, strict bool) ([6]int, listItemType) {
|
||||
m, typ := parseListItem(source)
|
||||
if typ != notList && (!strict || strict && m[1] < 4) {
|
||||
return m, typ
|
||||
}
|
||||
return m, notList
|
||||
}
|
||||
|
||||
func calcListOffset(source []byte, match [6]int) int {
|
||||
offset := 0
|
||||
if match[4] < 0 || util.IsBlank(source[match[4]:]) { // list item starts with a blank line
|
||||
offset = 1
|
||||
} else {
|
||||
offset, _ = util.IndentWidth(source[match[4]:], match[4])
|
||||
if offset > 4 { // offseted codeblock
|
||||
offset = 1
|
||||
}
|
||||
}
|
||||
return offset
|
||||
}
|
||||
|
||||
func lastOffset(node ast.Node) int {
|
||||
lastChild := node.LastChild()
|
||||
if lastChild != nil {
|
||||
return lastChild.(*ast.ListItem).Offset
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
type listParser struct {
|
||||
}
|
||||
|
||||
var defaultListParser = &listParser{}
|
||||
|
||||
// NewListParser returns a new BlockParser that
|
||||
// parses lists.
|
||||
// This parser must take precedence over the ListItemParser.
|
||||
func NewListParser() BlockParser {
|
||||
return defaultListParser
|
||||
}
|
||||
|
||||
func (b *listParser) Trigger() []byte {
|
||||
return []byte{'-', '+', '*', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9'}
|
||||
}
|
||||
|
||||
func (b *listParser) Open(parent ast.Node, reader text.Reader, pc Context) (ast.Node, State) {
|
||||
last := pc.LastOpenedBlock().Node
|
||||
if _, lok := last.(*ast.List); lok || pc.Get(skipListParserKey) != nil {
|
||||
pc.Set(skipListParserKey, nil)
|
||||
return nil, NoChildren
|
||||
}
|
||||
line, _ := reader.PeekLine()
|
||||
match, typ := matchesListItem(line, true)
|
||||
if typ == notList {
|
||||
return nil, NoChildren
|
||||
}
|
||||
start := -1
|
||||
if typ == orderedList {
|
||||
number := line[match[2] : match[3]-1]
|
||||
start, _ = strconv.Atoi(string(number))
|
||||
}
|
||||
|
||||
if ast.IsParagraph(last) && last.Parent() == parent {
|
||||
// we allow only lists starting with 1 to interrupt paragraphs.
|
||||
if typ == orderedList && start != 1 {
|
||||
return nil, NoChildren
|
||||
}
|
||||
//an empty list item cannot interrupt a paragraph:
|
||||
if match[4] < 0 || util.IsBlank(line[match[4]:match[5]]) {
|
||||
return nil, NoChildren
|
||||
}
|
||||
}
|
||||
|
||||
marker := line[match[3]-1]
|
||||
node := ast.NewList(marker)
|
||||
if start > -1 {
|
||||
node.Start = start
|
||||
}
|
||||
pc.Set(emptyListItemWithBlankLines, nil)
|
||||
return node, HasChildren
|
||||
}
|
||||
|
||||
func (b *listParser) Continue(node ast.Node, reader text.Reader, pc Context) State {
|
||||
list := node.(*ast.List)
|
||||
line, _ := reader.PeekLine()
|
||||
if util.IsBlank(line) {
|
||||
if node.LastChild().ChildCount() == 0 {
|
||||
pc.Set(emptyListItemWithBlankLines, listItemFlagValue)
|
||||
}
|
||||
return Continue | HasChildren
|
||||
}
|
||||
|
||||
// "offset" means a width that bar indicates.
|
||||
// - aaaaaaaa
|
||||
// |----|
|
||||
//
|
||||
// If the indent is less than the last offset like
|
||||
// - a
|
||||
// - b <--- current line
|
||||
// it maybe a new child of the list.
|
||||
//
|
||||
// Empty list items can have multiple blanklines
|
||||
//
|
||||
// - <--- 1st item is an empty thus "offset" is unknown
|
||||
//
|
||||
//
|
||||
// - <--- current line
|
||||
//
|
||||
// -> 1 list with 2 blank items
|
||||
//
|
||||
// So if the last item is an empty, it maybe a new child of the list.
|
||||
//
|
||||
offset := lastOffset(node)
|
||||
lastIsEmpty := node.LastChild().ChildCount() == 0
|
||||
indent, _ := util.IndentWidth(line, reader.LineOffset())
|
||||
|
||||
if indent < offset || lastIsEmpty {
|
||||
if indent < 4 {
|
||||
match, typ := matchesListItem(line, false) // may have a leading spaces more than 3
|
||||
if typ != notList && match[1]-offset < 4 {
|
||||
marker := line[match[3]-1]
|
||||
if !list.CanContinue(marker, typ == orderedList) {
|
||||
return Close
|
||||
}
|
||||
// Thematic Breaks take precedence over lists
|
||||
if isThematicBreak(line[match[3]-1:], 0) {
|
||||
isHeading := false
|
||||
last := pc.LastOpenedBlock().Node
|
||||
if ast.IsParagraph(last) {
|
||||
c, ok := matchesSetextHeadingBar(line[match[3]-1:])
|
||||
if ok && c == '-' {
|
||||
isHeading = true
|
||||
}
|
||||
}
|
||||
if !isHeading {
|
||||
return Close
|
||||
}
|
||||
}
|
||||
return Continue | HasChildren
|
||||
}
|
||||
}
|
||||
if !lastIsEmpty {
|
||||
return Close
|
||||
}
|
||||
}
|
||||
|
||||
// Non empty items can not exist next to an empty list item
|
||||
// with blank lines. So we need to close the current list
|
||||
//
|
||||
// -
|
||||
//
|
||||
// foo
|
||||
//
|
||||
// -> 1 list with 1 blank items and 1 paragraph
|
||||
if pc.Get(emptyListItemWithBlankLines) != nil {
|
||||
return Close
|
||||
}
|
||||
return Continue | HasChildren
|
||||
}
|
||||
|
||||
func (b *listParser) Close(node ast.Node, reader text.Reader, pc Context) {
|
||||
list := node.(*ast.List)
|
||||
|
||||
for c := node.FirstChild(); c != nil && list.IsTight; c = c.NextSibling() {
|
||||
if c.FirstChild() != nil && c.FirstChild() != c.LastChild() {
|
||||
for c1 := c.FirstChild().NextSibling(); c1 != nil; c1 = c1.NextSibling() {
|
||||
if bl, ok := c1.(ast.Node); ok && bl.HasBlankPreviousLines() {
|
||||
list.IsTight = false
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if c != node.FirstChild() {
|
||||
if bl, ok := c.(ast.Node); ok && bl.HasBlankPreviousLines() {
|
||||
list.IsTight = false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if list.IsTight {
|
||||
for child := node.FirstChild(); child != nil; child = child.NextSibling() {
|
||||
for gc := child.FirstChild(); gc != nil; {
|
||||
paragraph, ok := gc.(*ast.Paragraph)
|
||||
gc = gc.NextSibling()
|
||||
if ok {
|
||||
textBlock := ast.NewTextBlock()
|
||||
textBlock.SetLines(paragraph.Lines())
|
||||
child.ReplaceChild(child, paragraph, textBlock)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (b *listParser) CanInterruptParagraph() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (b *listParser) CanAcceptIndentedLine() bool {
|
||||
return false
|
||||
}
|
|
@ -0,0 +1,90 @@
package parser

import (
	"github.com/yuin/goldmark/ast"
	"github.com/yuin/goldmark/text"
	"github.com/yuin/goldmark/util"
)

type listItemParser struct {
}

var defaultListItemParser = &listItemParser{}

// NewListItemParser returns a new BlockParser that
// parses list items.
func NewListItemParser() BlockParser {
	return defaultListItemParser
}

func (b *listItemParser) Trigger() []byte {
	return []byte{'-', '+', '*', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9'}
}

func (b *listItemParser) Open(parent ast.Node, reader text.Reader, pc Context) (ast.Node, State) {
	list, lok := parent.(*ast.List)
	if !lok { // list item must be a child of a list
		return nil, NoChildren
	}
	offset := lastOffset(list)
	line, _ := reader.PeekLine()
	match, typ := matchesListItem(line, false)
	if typ == notList {
		return nil, NoChildren
	}
	if match[1]-offset > 3 {
		return nil, NoChildren
	}

	pc.Set(emptyListItemWithBlankLines, nil)

	itemOffset := calcListOffset(line, match)
	node := ast.NewListItem(match[3] + itemOffset)
	if match[4] < 0 || util.IsBlank(line[match[4]:match[5]]) {
		return node, NoChildren
	}

	pos, padding := util.IndentPosition(line[match[4]:], match[4], itemOffset)
	child := match[3] + pos
	reader.AdvanceAndSetPadding(child, padding)
	return node, HasChildren
}

func (b *listItemParser) Continue(node ast.Node, reader text.Reader, pc Context) State {
	line, _ := reader.PeekLine()
	if util.IsBlank(line) {
		reader.Advance(len(line) - 1)
		return Continue | HasChildren
	}

	offset := lastOffset(node.Parent())
	isEmpty := node.ChildCount() == 0
	indent, _ := util.IndentWidth(line, reader.LineOffset())
	if (isEmpty || indent < offset) && indent < 4 {
		_, typ := matchesListItem(line, true)
		// new list item found
		if typ != notList {
			pc.Set(skipListParserKey, listItemFlagValue)
			return Close
		}
		if !isEmpty {
			return Close
		}
	}
	pos, padding := util.IndentPosition(line, reader.LineOffset(), offset)
	reader.AdvanceAndSetPadding(pos, padding)

	return Continue | HasChildren
}

func (b *listItemParser) Close(node ast.Node, reader text.Reader, pc Context) {
	// nothing to do
}

func (b *listItemParser) CanInterruptParagraph() bool {
	return true
}

func (b *listItemParser) CanAcceptIndentedLine() bool {
	return false
}
@ -0,0 +1,71 @@
package parser

import (
	"github.com/yuin/goldmark/ast"
	"github.com/yuin/goldmark/text"
)

type paragraphParser struct {
}

var defaultParagraphParser = &paragraphParser{}

// NewParagraphParser returns a new BlockParser that
// parses paragraphs.
func NewParagraphParser() BlockParser {
	return defaultParagraphParser
}

func (b *paragraphParser) Trigger() []byte {
	return nil
}

func (b *paragraphParser) Open(parent ast.Node, reader text.Reader, pc Context) (ast.Node, State) {
	_, segment := reader.PeekLine()
	segment = segment.TrimLeftSpace(reader.Source())
	if segment.IsEmpty() {
		return nil, NoChildren
	}
	node := ast.NewParagraph()
	node.Lines().Append(segment)
	reader.Advance(segment.Len() - 1)
	return node, NoChildren
}

func (b *paragraphParser) Continue(node ast.Node, reader text.Reader, pc Context) State {
	_, segment := reader.PeekLine()
	segment = segment.TrimLeftSpace(reader.Source())
	if segment.IsEmpty() {
		return Close
	}
	node.Lines().Append(segment)
	reader.Advance(segment.Len() - 1)
	return Continue | NoChildren
}

func (b *paragraphParser) Close(node ast.Node, reader text.Reader, pc Context) {
	parent := node.Parent()
	if parent == nil {
		// paragraph has been transformed
		return
	}
	lines := node.Lines()
	if lines.Len() != 0 {
		// trim trailing spaces
		length := lines.Len()
		lastLine := node.Lines().At(length - 1)
		node.Lines().Set(length-1, lastLine.TrimRightSpace(reader.Source()))
	}
	if lines.Len() == 0 {
		node.Parent().RemoveChild(node.Parent(), node)
		return
	}
}

func (b *paragraphParser) CanInterruptParagraph() bool {
	return false
}

func (b *paragraphParser) CanAcceptIndentedLine() bool {
	return false
}
File diff suppressed because it is too large
|
@ -0,0 +1,163 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"regexp"
|
||||
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/text"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
type rawHTMLParser struct {
|
||||
}
|
||||
|
||||
var defaultRawHTMLParser = &rawHTMLParser{}
|
||||
|
||||
// NewRawHTMLParser return a new InlineParser that can parse
|
||||
// inline htmls
|
||||
func NewRawHTMLParser() InlineParser {
|
||||
return defaultRawHTMLParser
|
||||
}
|
||||
|
||||
func (s *rawHTMLParser) Trigger() []byte {
|
||||
return []byte{'<'}
|
||||
}
|
||||
|
||||
func (s *rawHTMLParser) Parse(parent ast.Node, block text.Reader, pc Context) ast.Node {
|
||||
line, _ := block.PeekLine()
|
||||
if len(line) > 1 && util.IsAlphaNumeric(line[1]) {
|
||||
return s.parseMultiLineRegexp(openTagRegexp, block, pc)
|
||||
}
|
||||
if len(line) > 2 && line[1] == '/' && util.IsAlphaNumeric(line[2]) {
|
||||
return s.parseMultiLineRegexp(closeTagRegexp, block, pc)
|
||||
}
|
||||
if bytes.HasPrefix(line, openComment) {
|
||||
return s.parseComment(block, pc)
|
||||
}
|
||||
if bytes.HasPrefix(line, openProcessingInstruction) {
|
||||
return s.parseUntil(block, closeProcessingInstruction, pc)
|
||||
}
|
||||
if len(line) > 2 && line[1] == '!' && line[2] >= 'A' && line[2] <= 'Z' {
|
||||
return s.parseUntil(block, closeDecl, pc)
|
||||
}
|
||||
if bytes.HasPrefix(line, openCDATA) {
|
||||
return s.parseUntil(block, closeCDATA, pc)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
var tagnamePattern = `([A-Za-z][A-Za-z0-9-]*)`
|
||||
|
||||
var attributePattern = `(?:[\r\n \t]+[a-zA-Z_:][a-zA-Z0-9:._-]*(?:[\r\n \t]*=[\r\n \t]*(?:[^\"'=<>` + "`" + `\x00-\x20]+|'[^']*'|"[^"]*"))?)`
|
||||
var openTagRegexp = regexp.MustCompile("^<" + tagnamePattern + attributePattern + `*[ \t]*/?>`)
|
||||
var closeTagRegexp = regexp.MustCompile("^</" + tagnamePattern + `\s*>`)
|
||||
|
||||
var openProcessingInstruction = []byte("<?")
|
||||
var closeProcessingInstruction = []byte("?>")
|
||||
var openCDATA = []byte("<![CDATA[")
|
||||
var closeCDATA = []byte("]]>")
|
||||
var closeDecl = []byte(">")
|
||||
var emptyComment = []byte("<!---->")
|
||||
var invalidComment1 = []byte("<!-->")
|
||||
var invalidComment2 = []byte("<!--->")
|
||||
var openComment = []byte("<!--")
|
||||
var closeComment = []byte("-->")
|
||||
var doubleHyphen = []byte("--")
|
||||
|
||||
func (s *rawHTMLParser) parseComment(block text.Reader, pc Context) ast.Node {
|
||||
savedLine, savedSegment := block.Position()
|
||||
node := ast.NewRawHTML()
|
||||
line, segment := block.PeekLine()
|
||||
if bytes.HasPrefix(line, emptyComment) {
|
||||
node.Segments.Append(segment.WithStop(segment.Start + len(emptyComment)))
|
||||
block.Advance(len(emptyComment))
|
||||
return node
|
||||
}
|
||||
if bytes.HasPrefix(line, invalidComment1) || bytes.HasPrefix(line, invalidComment2) {
|
||||
return nil
|
||||
}
|
||||
offset := len(openComment)
|
||||
line = line[offset:]
|
||||
for {
|
||||
hindex := bytes.Index(line, doubleHyphen)
|
||||
if hindex > -1 {
|
||||
hindex += offset
|
||||
}
|
||||
index := bytes.Index(line, closeComment) + offset
|
||||
if index > -1 && hindex == index {
|
||||
if index == 0 || len(line) < 2 || line[index-offset-1] != '-' {
|
||||
node.Segments.Append(segment.WithStop(segment.Start + index + len(closeComment)))
|
||||
block.Advance(index + len(closeComment))
|
||||
return node
|
||||
}
|
||||
}
|
||||
if hindex > 0 {
|
||||
break
|
||||
}
|
||||
node.Segments.Append(segment)
|
||||
block.AdvanceLine()
|
||||
line, segment = block.PeekLine()
|
||||
offset = 0
|
||||
if line == nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
block.SetPosition(savedLine, savedSegment)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *rawHTMLParser) parseUntil(block text.Reader, closer []byte, pc Context) ast.Node {
|
||||
savedLine, savedSegment := block.Position()
|
||||
node := ast.NewRawHTML()
|
||||
for {
|
||||
line, segment := block.PeekLine()
|
||||
if line == nil {
|
||||
break
|
||||
}
|
||||
index := bytes.Index(line, closer)
|
||||
if index > -1 {
|
||||
node.Segments.Append(segment.WithStop(segment.Start + index + len(closer)))
|
||||
block.Advance(index + len(closer))
|
||||
return node
|
||||
}
|
||||
node.Segments.Append(segment)
|
||||
block.AdvanceLine()
|
||||
}
|
||||
block.SetPosition(savedLine, savedSegment)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *rawHTMLParser) parseMultiLineRegexp(reg *regexp.Regexp, block text.Reader, pc Context) ast.Node {
|
||||
sline, ssegment := block.Position()
|
||||
if block.Match(reg) {
|
||||
node := ast.NewRawHTML()
|
||||
eline, esegment := block.Position()
|
||||
block.SetPosition(sline, ssegment)
|
||||
for {
|
||||
line, segment := block.PeekLine()
|
||||
if line == nil {
|
||||
break
|
||||
}
|
||||
l, _ := block.Position()
|
||||
start := segment.Start
|
||||
if l == sline {
|
||||
start = ssegment.Start
|
||||
}
|
||||
end := segment.Stop
|
||||
if l == eline {
|
||||
end = esegment.Start
|
||||
}
|
||||
|
||||
node.Segments.Append(text.NewSegment(start, end))
|
||||
if l == eline {
|
||||
block.Advance(end - start)
|
||||
break
|
||||
} else {
|
||||
block.AdvanceLine()
|
||||
}
|
||||
}
|
||||
return node
|
||||
}
|
||||
return nil
|
||||
}
|
|
@@ -0,0 +1,126 @@
package parser

import (
    "github.com/yuin/goldmark/ast"
    "github.com/yuin/goldmark/text"
    "github.com/yuin/goldmark/util"
)

var temporaryParagraphKey = NewContextKey()

type setextHeadingParser struct {
    HeadingConfig
}

func matchesSetextHeadingBar(line []byte) (byte, bool) {
    start := 0
    end := len(line)
    space := util.TrimLeftLength(line, []byte{' '})
    if space > 3 {
        return 0, false
    }
    start += space
    level1 := util.TrimLeftLength(line[start:end], []byte{'='})
    c := byte('=')
    var level2 int
    if level1 == 0 {
        level2 = util.TrimLeftLength(line[start:end], []byte{'-'})
        c = '-'
    }
    if util.IsSpace(line[end-1]) {
        end -= util.TrimRightSpaceLength(line[start:end])
    }
    if !((level1 > 0 && start+level1 == end) || (level2 > 0 && start+level2 == end)) {
        return 0, false
    }
    return c, true
}

// NewSetextHeadingParser return a new BlockParser that can parse Setext headings.
func NewSetextHeadingParser(opts ...HeadingOption) BlockParser {
    p := &setextHeadingParser{}
    for _, o := range opts {
        o.SetHeadingOption(&p.HeadingConfig)
    }
    return p
}

func (b *setextHeadingParser) Trigger() []byte {
    return []byte{'-', '='}
}

func (b *setextHeadingParser) Open(parent ast.Node, reader text.Reader, pc Context) (ast.Node, State) {
    last := pc.LastOpenedBlock().Node
    if last == nil {
        return nil, NoChildren
    }
    paragraph, ok := last.(*ast.Paragraph)
    if !ok || paragraph.Parent() != parent {
        return nil, NoChildren
    }
    line, segment := reader.PeekLine()
    c, ok := matchesSetextHeadingBar(line)
    if !ok {
        return nil, NoChildren
    }
    level := 1
    if c == '-' {
        level = 2
    }
    node := ast.NewHeading(level)
    node.Lines().Append(segment)
    pc.Set(temporaryParagraphKey, last)
    return node, NoChildren | RequireParagraph
}

func (b *setextHeadingParser) Continue(node ast.Node, reader text.Reader, pc Context) State {
    return Close
}

func (b *setextHeadingParser) Close(node ast.Node, reader text.Reader, pc Context) {
    heading := node.(*ast.Heading)
    segment := node.Lines().At(0)
    heading.Lines().Clear()
    tmp := pc.Get(temporaryParagraphKey).(*ast.Paragraph)
    pc.Set(temporaryParagraphKey, nil)
    if tmp.Lines().Len() == 0 {
        next := heading.NextSibling()
        segment = segment.TrimLeftSpace(reader.Source())
        if next == nil || !ast.IsParagraph(next) {
            para := ast.NewParagraph()
            para.Lines().Append(segment)
            heading.Parent().InsertAfter(heading.Parent(), heading, para)
        } else {
            next.(ast.Node).Lines().Unshift(segment)
        }
        heading.Parent().RemoveChild(heading.Parent(), heading)
    } else {
        heading.SetLines(tmp.Lines())
        heading.SetBlankPreviousLines(tmp.HasBlankPreviousLines())
        tp := tmp.Parent()
        if tp != nil {
            tp.RemoveChild(tp, tmp)
        }
    }

    if b.Attribute {
        parseLastLineAttributes(node, reader, pc)
    }

    if b.AutoHeadingID {
        id, ok := node.AttributeString("id")
        if !ok {
            generateAutoHeadingID(heading, reader, pc)
        } else {
            pc.IDs().Put(id.([]byte))
        }
    }
}

func (b *setextHeadingParser) CanInterruptParagraph() bool {
    return true
}

func (b *setextHeadingParser) CanAcceptIndentedLine() bool {
    return false
}
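Illustrative aside (not part of the vendored diff above): the setext heading parser recognizes a paragraph followed by an underline of '=' (level 1) or '-' (level 2). A minimal sketch of exercising it through goldmark's public entry point, assuming the vendored module is importable as github.com/yuin/goldmark:

package main

import (
    "bytes"
    "fmt"

    "github.com/yuin/goldmark"
)

func main() {
    // "Title" underlined with '=' becomes <h1>; "Subtitle" underlined with '-' becomes <h2>.
    source := []byte("Title\n=====\n\nSubtitle\n--------\n")
    var buf bytes.Buffer
    if err := goldmark.New().Convert(source, &buf); err != nil {
        panic(err)
    }
    fmt.Print(buf.String())
}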
@@ -0,0 +1,75 @@
package parser

import (
    "github.com/yuin/goldmark/ast"
    "github.com/yuin/goldmark/text"
    "github.com/yuin/goldmark/util"
)

type thematicBreakPraser struct {
}

var defaultThematicBreakPraser = &thematicBreakPraser{}

// NewThematicBreakParser returns a new BlockParser that
// parses thematic breaks.
func NewThematicBreakParser() BlockParser {
    return defaultThematicBreakPraser
}

func isThematicBreak(line []byte, offset int) bool {
    w, pos := util.IndentWidth(line, offset)
    if w > 3 {
        return false
    }
    mark := byte(0)
    count := 0
    for i := pos; i < len(line); i++ {
        c := line[i]
        if util.IsSpace(c) {
            continue
        }
        if mark == 0 {
            mark = c
            count = 1
            if mark == '*' || mark == '-' || mark == '_' {
                continue
            }
            return false
        }
        if c != mark {
            return false
        }
        count++
    }
    return count > 2
}

func (b *thematicBreakPraser) Trigger() []byte {
    return []byte{'-', '*', '_'}
}

func (b *thematicBreakPraser) Open(parent ast.Node, reader text.Reader, pc Context) (ast.Node, State) {
    line, segment := reader.PeekLine()
    if isThematicBreak(line, reader.LineOffset()) {
        reader.Advance(segment.Len() - 1)
        return ast.NewThematicBreak(), NoChildren
    }
    return nil, NoChildren
}

func (b *thematicBreakPraser) Continue(node ast.Node, reader text.Reader, pc Context) State {
    return Close
}

func (b *thematicBreakPraser) Close(node ast.Node, reader text.Reader, pc Context) {
    // nothing to do
}

func (b *thematicBreakPraser) CanInterruptParagraph() bool {
    return true
}

func (b *thematicBreakPraser) CanAcceptIndentedLine() bool {
    return false
}
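Illustrative aside (not part of the diff): isThematicBreak accepts a line made up of three or more '*', '-', or '_' characters (optionally space-separated, with at most three leading spaces). A short sketch, reusing the imports from the sketch after the setext heading parser above:

var buf bytes.Buffer
// The blank line before "---" keeps it from being read as a setext heading underline.
if err := goldmark.New().Convert([]byte("above\n\n---\n\nbelow\n"), &buf); err != nil {
    panic(err)
}
// buf now contains <p>above</p>, an <hr>, and <p>below</p>.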
@@ -0,0 +1,842 @@
|
|||
package html
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"strconv"
|
||||
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/renderer"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
// A Config struct has configurations for the HTML based renderers.
|
||||
type Config struct {
|
||||
Writer Writer
|
||||
HardWraps bool
|
||||
XHTML bool
|
||||
Unsafe bool
|
||||
}
|
||||
|
||||
// NewConfig returns a new Config with defaults.
|
||||
func NewConfig() Config {
|
||||
return Config{
|
||||
Writer: DefaultWriter,
|
||||
HardWraps: false,
|
||||
XHTML: false,
|
||||
Unsafe: false,
|
||||
}
|
||||
}
|
||||
|
||||
// SetOption implements renderer.NodeRenderer.SetOption.
|
||||
func (c *Config) SetOption(name renderer.OptionName, value interface{}) {
|
||||
switch name {
|
||||
case optHardWraps:
|
||||
c.HardWraps = value.(bool)
|
||||
case optXHTML:
|
||||
c.XHTML = value.(bool)
|
||||
case optUnsafe:
|
||||
c.Unsafe = value.(bool)
|
||||
case optTextWriter:
|
||||
c.Writer = value.(Writer)
|
||||
}
|
||||
}
|
||||
|
||||
// An Option interface sets options for HTML based renderers.
|
||||
type Option interface {
|
||||
SetHTMLOption(*Config)
|
||||
}
|
||||
|
||||
// TextWriter is an option name used in WithWriter.
|
||||
const optTextWriter renderer.OptionName = "Writer"
|
||||
|
||||
type withWriter struct {
|
||||
value Writer
|
||||
}
|
||||
|
||||
func (o *withWriter) SetConfig(c *renderer.Config) {
|
||||
c.Options[optTextWriter] = o.value
|
||||
}
|
||||
|
||||
func (o *withWriter) SetHTMLOption(c *Config) {
|
||||
c.Writer = o.value
|
||||
}
|
||||
|
||||
// WithWriter is a functional option that allow you to set the given writer to
|
||||
// the renderer.
|
||||
func WithWriter(writer Writer) interface {
|
||||
renderer.Option
|
||||
Option
|
||||
} {
|
||||
return &withWriter{writer}
|
||||
}
|
||||
|
||||
// HardWraps is an option name used in WithHardWraps.
|
||||
const optHardWraps renderer.OptionName = "HardWraps"
|
||||
|
||||
type withHardWraps struct {
|
||||
}
|
||||
|
||||
func (o *withHardWraps) SetConfig(c *renderer.Config) {
|
||||
c.Options[optHardWraps] = true
|
||||
}
|
||||
|
||||
func (o *withHardWraps) SetHTMLOption(c *Config) {
|
||||
c.HardWraps = true
|
||||
}
|
||||
|
||||
// WithHardWraps is a functional option that indicates whether softline breaks
|
||||
// should be rendered as '<br>'.
|
||||
func WithHardWraps() interface {
|
||||
renderer.Option
|
||||
Option
|
||||
} {
|
||||
return &withHardWraps{}
|
||||
}
|
||||
|
||||
// XHTML is an option name used in WithXHTML.
|
||||
const optXHTML renderer.OptionName = "XHTML"
|
||||
|
||||
type withXHTML struct {
|
||||
}
|
||||
|
||||
func (o *withXHTML) SetConfig(c *renderer.Config) {
|
||||
c.Options[optXHTML] = true
|
||||
}
|
||||
|
||||
func (o *withXHTML) SetHTMLOption(c *Config) {
|
||||
c.XHTML = true
|
||||
}
|
||||
|
||||
// WithXHTML is a functional option indicates that nodes should be rendered in
|
||||
// xhtml instead of HTML5.
|
||||
func WithXHTML() interface {
|
||||
Option
|
||||
renderer.Option
|
||||
} {
|
||||
return &withXHTML{}
|
||||
}
|
||||
|
||||
// Unsafe is an option name used in WithUnsafe.
|
||||
const optUnsafe renderer.OptionName = "Unsafe"
|
||||
|
||||
type withUnsafe struct {
|
||||
}
|
||||
|
||||
func (o *withUnsafe) SetConfig(c *renderer.Config) {
|
||||
c.Options[optUnsafe] = true
|
||||
}
|
||||
|
||||
func (o *withUnsafe) SetHTMLOption(c *Config) {
|
||||
c.Unsafe = true
|
||||
}
|
||||
|
||||
// WithUnsafe is a functional option that renders dangerous contents
|
||||
// (raw htmls and potentially dangerous links) as it is.
|
||||
func WithUnsafe() interface {
|
||||
renderer.Option
|
||||
Option
|
||||
} {
|
||||
return &withUnsafe{}
|
||||
}
|
||||
|
||||
// A Renderer struct is an implementation of renderer.NodeRenderer that renders
|
||||
// nodes as (X)HTML.
|
||||
type Renderer struct {
|
||||
Config
|
||||
}
|
||||
|
||||
// NewRenderer returns a new Renderer with given options.
|
||||
func NewRenderer(opts ...Option) renderer.NodeRenderer {
|
||||
r := &Renderer{
|
||||
Config: NewConfig(),
|
||||
}
|
||||
|
||||
for _, opt := range opts {
|
||||
opt.SetHTMLOption(&r.Config)
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// RegisterFuncs implements NodeRenderer.RegisterFuncs .
|
||||
func (r *Renderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
|
||||
// blocks
|
||||
|
||||
reg.Register(ast.KindDocument, r.renderDocument)
|
||||
reg.Register(ast.KindHeading, r.renderHeading)
|
||||
reg.Register(ast.KindBlockquote, r.renderBlockquote)
|
||||
reg.Register(ast.KindCodeBlock, r.renderCodeBlock)
|
||||
reg.Register(ast.KindFencedCodeBlock, r.renderFencedCodeBlock)
|
||||
reg.Register(ast.KindHTMLBlock, r.renderHTMLBlock)
|
||||
reg.Register(ast.KindList, r.renderList)
|
||||
reg.Register(ast.KindListItem, r.renderListItem)
|
||||
reg.Register(ast.KindParagraph, r.renderParagraph)
|
||||
reg.Register(ast.KindTextBlock, r.renderTextBlock)
|
||||
reg.Register(ast.KindThematicBreak, r.renderThematicBreak)
|
||||
|
||||
// inlines
|
||||
|
||||
reg.Register(ast.KindAutoLink, r.renderAutoLink)
|
||||
reg.Register(ast.KindCodeSpan, r.renderCodeSpan)
|
||||
reg.Register(ast.KindEmphasis, r.renderEmphasis)
|
||||
reg.Register(ast.KindImage, r.renderImage)
|
||||
reg.Register(ast.KindLink, r.renderLink)
|
||||
reg.Register(ast.KindRawHTML, r.renderRawHTML)
|
||||
reg.Register(ast.KindText, r.renderText)
|
||||
reg.Register(ast.KindString, r.renderString)
|
||||
}
|
||||
|
||||
func (r *Renderer) writeLines(w util.BufWriter, source []byte, n ast.Node) {
|
||||
l := n.Lines().Len()
|
||||
for i := 0; i < l; i++ {
|
||||
line := n.Lines().At(i)
|
||||
r.Writer.RawWrite(w, line.Value(source))
|
||||
}
|
||||
}
|
||||
|
||||
// GlobalAttributeFilter defines attribute names which any elements can have.
|
||||
var GlobalAttributeFilter = util.NewBytesFilter(
|
||||
[]byte("accesskey"),
|
||||
[]byte("autocapitalize"),
|
||||
[]byte("autofocus"),
|
||||
[]byte("class"),
|
||||
[]byte("contenteditable"),
|
||||
[]byte("dir"),
|
||||
[]byte("draggable"),
|
||||
[]byte("enterkeyhint"),
|
||||
[]byte("hidden"),
|
||||
[]byte("id"),
|
||||
[]byte("inert"),
|
||||
[]byte("inputmode"),
|
||||
[]byte("is"),
|
||||
[]byte("itemid"),
|
||||
[]byte("itemprop"),
|
||||
[]byte("itemref"),
|
||||
[]byte("itemscope"),
|
||||
[]byte("itemtype"),
|
||||
[]byte("lang"),
|
||||
[]byte("part"),
|
||||
[]byte("slot"),
|
||||
[]byte("spellcheck"),
|
||||
[]byte("style"),
|
||||
[]byte("tabindex"),
|
||||
[]byte("title"),
|
||||
[]byte("translate"),
|
||||
)
|
||||
|
||||
func (r *Renderer) renderDocument(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
// nothing to do
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
|
||||
// HeadingAttributeFilter defines attribute names which heading elements can have
|
||||
var HeadingAttributeFilter = GlobalAttributeFilter
|
||||
|
||||
func (r *Renderer) renderHeading(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
n := node.(*ast.Heading)
|
||||
if entering {
|
||||
_, _ = w.WriteString("<h")
|
||||
_ = w.WriteByte("0123456"[n.Level])
|
||||
if n.Attributes() != nil {
|
||||
RenderAttributes(w, node, HeadingAttributeFilter)
|
||||
}
|
||||
_ = w.WriteByte('>')
|
||||
} else {
|
||||
_, _ = w.WriteString("</h")
|
||||
_ = w.WriteByte("0123456"[n.Level])
|
||||
_, _ = w.WriteString(">\n")
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
|
||||
// BlockquoteAttributeFilter defines attribute names which blockquote elements can have
|
||||
var BlockquoteAttributeFilter = GlobalAttributeFilter.Extend(
|
||||
[]byte("cite"),
|
||||
)
|
||||
|
||||
func (r *Renderer) renderBlockquote(w util.BufWriter, source []byte, n ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if entering {
|
||||
if n.Attributes() != nil {
|
||||
_, _ = w.WriteString("<blockquote")
|
||||
RenderAttributes(w, n, BlockquoteAttributeFilter)
|
||||
_ = w.WriteByte('>')
|
||||
} else {
|
||||
_, _ = w.WriteString("<blockquote>\n")
|
||||
}
|
||||
} else {
|
||||
_, _ = w.WriteString("</blockquote>\n")
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
|
||||
func (r *Renderer) renderCodeBlock(w util.BufWriter, source []byte, n ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if entering {
|
||||
_, _ = w.WriteString("<pre><code>")
|
||||
r.writeLines(w, source, n)
|
||||
} else {
|
||||
_, _ = w.WriteString("</code></pre>\n")
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
|
||||
func (r *Renderer) renderFencedCodeBlock(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
n := node.(*ast.FencedCodeBlock)
|
||||
if entering {
|
||||
_, _ = w.WriteString("<pre><code")
|
||||
language := n.Language(source)
|
||||
if language != nil {
|
||||
_, _ = w.WriteString(" class=\"language-")
|
||||
r.Writer.Write(w, language)
|
||||
_, _ = w.WriteString("\"")
|
||||
}
|
||||
_ = w.WriteByte('>')
|
||||
r.writeLines(w, source, n)
|
||||
} else {
|
||||
_, _ = w.WriteString("</code></pre>\n")
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
|
||||
func (r *Renderer) renderHTMLBlock(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
n := node.(*ast.HTMLBlock)
|
||||
if entering {
|
||||
if r.Unsafe {
|
||||
l := n.Lines().Len()
|
||||
for i := 0; i < l; i++ {
|
||||
line := n.Lines().At(i)
|
||||
r.Writer.SecureWrite(w, line.Value(source))
|
||||
}
|
||||
} else {
|
||||
_, _ = w.WriteString("<!-- raw HTML omitted -->\n")
|
||||
}
|
||||
} else {
|
||||
if n.HasClosure() {
|
||||
if r.Unsafe {
|
||||
closure := n.ClosureLine
|
||||
r.Writer.SecureWrite(w, closure.Value(source))
|
||||
} else {
|
||||
_, _ = w.WriteString("<!-- raw HTML omitted -->\n")
|
||||
}
|
||||
}
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
|
||||
// ListAttributeFilter defines attribute names which list elements can have.
|
||||
var ListAttributeFilter = GlobalAttributeFilter.Extend(
|
||||
[]byte("start"),
|
||||
[]byte("reversed"),
|
||||
[]byte("type"),
|
||||
)
|
||||
|
||||
func (r *Renderer) renderList(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
n := node.(*ast.List)
|
||||
tag := "ul"
|
||||
if n.IsOrdered() {
|
||||
tag = "ol"
|
||||
}
|
||||
if entering {
|
||||
_ = w.WriteByte('<')
|
||||
_, _ = w.WriteString(tag)
|
||||
if n.IsOrdered() && n.Start != 1 {
|
||||
fmt.Fprintf(w, " start=\"%d\"", n.Start)
|
||||
}
|
||||
if n.Attributes() != nil {
|
||||
RenderAttributes(w, n, ListAttributeFilter)
|
||||
}
|
||||
_, _ = w.WriteString(">\n")
|
||||
} else {
|
||||
_, _ = w.WriteString("</")
|
||||
_, _ = w.WriteString(tag)
|
||||
_, _ = w.WriteString(">\n")
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
|
||||
// ListItemAttributeFilter defines attribute names which list item elements can have.
|
||||
var ListItemAttributeFilter = GlobalAttributeFilter.Extend(
|
||||
[]byte("value"),
|
||||
)
|
||||
|
||||
func (r *Renderer) renderListItem(w util.BufWriter, source []byte, n ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if entering {
|
||||
if n.Attributes() != nil {
|
||||
_, _ = w.WriteString("<li")
|
||||
RenderAttributes(w, n, ListItemAttributeFilter)
|
||||
_ = w.WriteByte('>')
|
||||
} else {
|
||||
_, _ = w.WriteString("<li>")
|
||||
}
|
||||
fc := n.FirstChild()
|
||||
if fc != nil {
|
||||
if _, ok := fc.(*ast.TextBlock); !ok {
|
||||
_ = w.WriteByte('\n')
|
||||
}
|
||||
}
|
||||
} else {
|
||||
_, _ = w.WriteString("</li>\n")
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
|
||||
// ParagraphAttributeFilter defines attribute names which paragraph elements can have.
|
||||
var ParagraphAttributeFilter = GlobalAttributeFilter
|
||||
|
||||
func (r *Renderer) renderParagraph(w util.BufWriter, source []byte, n ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if entering {
|
||||
if n.Attributes() != nil {
|
||||
_, _ = w.WriteString("<p")
|
||||
RenderAttributes(w, n, ParagraphAttributeFilter)
|
||||
_ = w.WriteByte('>')
|
||||
} else {
|
||||
_, _ = w.WriteString("<p>")
|
||||
}
|
||||
} else {
|
||||
_, _ = w.WriteString("</p>\n")
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
|
||||
func (r *Renderer) renderTextBlock(w util.BufWriter, source []byte, n ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if !entering {
|
||||
if _, ok := n.NextSibling().(ast.Node); ok && n.FirstChild() != nil {
|
||||
_ = w.WriteByte('\n')
|
||||
}
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
|
||||
// ThematicAttributeFilter defines attribute names which hr elements can have.
|
||||
var ThematicAttributeFilter = GlobalAttributeFilter.Extend(
|
||||
[]byte("align"), // [Deprecated]
|
||||
[]byte("color"), // [Not Standardized]
|
||||
[]byte("noshade"), // [Deprecated]
|
||||
[]byte("size"), // [Deprecated]
|
||||
[]byte("width"), // [Deprecated]
|
||||
)
|
||||
|
||||
func (r *Renderer) renderThematicBreak(w util.BufWriter, source []byte, n ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if !entering {
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
_, _ = w.WriteString("<hr")
|
||||
if n.Attributes() != nil {
|
||||
RenderAttributes(w, n, ThematicAttributeFilter)
|
||||
}
|
||||
if r.XHTML {
|
||||
_, _ = w.WriteString(" />\n")
|
||||
} else {
|
||||
_, _ = w.WriteString(">\n")
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
|
||||
// LinkAttributeFilter defines attribute names which link elements can have.
|
||||
var LinkAttributeFilter = GlobalAttributeFilter.Extend(
|
||||
[]byte("download"),
|
||||
// []byte("href"),
|
||||
[]byte("hreflang"),
|
||||
[]byte("media"),
|
||||
[]byte("ping"),
|
||||
[]byte("referrerpolicy"),
|
||||
[]byte("rel"),
|
||||
[]byte("shape"),
|
||||
[]byte("target"),
|
||||
)
|
||||
|
||||
func (r *Renderer) renderAutoLink(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
n := node.(*ast.AutoLink)
|
||||
if !entering {
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
_, _ = w.WriteString(`<a href="`)
|
||||
url := n.URL(source)
|
||||
label := n.Label(source)
|
||||
if n.AutoLinkType == ast.AutoLinkEmail && !bytes.HasPrefix(bytes.ToLower(url), []byte("mailto:")) {
|
||||
_, _ = w.WriteString("mailto:")
|
||||
}
|
||||
_, _ = w.Write(util.EscapeHTML(util.URLEscape(url, false)))
|
||||
if n.Attributes() != nil {
|
||||
_ = w.WriteByte('"')
|
||||
RenderAttributes(w, n, LinkAttributeFilter)
|
||||
_ = w.WriteByte('>')
|
||||
} else {
|
||||
_, _ = w.WriteString(`">`)
|
||||
}
|
||||
_, _ = w.Write(util.EscapeHTML(label))
|
||||
_, _ = w.WriteString(`</a>`)
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
|
||||
// CodeAttributeFilter defines attribute names which code elements can have.
|
||||
var CodeAttributeFilter = GlobalAttributeFilter
|
||||
|
||||
func (r *Renderer) renderCodeSpan(w util.BufWriter, source []byte, n ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if entering {
|
||||
if n.Attributes() != nil {
|
||||
_, _ = w.WriteString("<code")
|
||||
RenderAttributes(w, n, CodeAttributeFilter)
|
||||
_ = w.WriteByte('>')
|
||||
} else {
|
||||
_, _ = w.WriteString("<code>")
|
||||
}
|
||||
for c := n.FirstChild(); c != nil; c = c.NextSibling() {
|
||||
segment := c.(*ast.Text).Segment
|
||||
value := segment.Value(source)
|
||||
if bytes.HasSuffix(value, []byte("\n")) {
|
||||
r.Writer.RawWrite(w, value[:len(value)-1])
|
||||
r.Writer.RawWrite(w, []byte(" "))
|
||||
} else {
|
||||
r.Writer.RawWrite(w, value)
|
||||
}
|
||||
}
|
||||
return ast.WalkSkipChildren, nil
|
||||
}
|
||||
_, _ = w.WriteString("</code>")
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
|
||||
// EmphasisAttributeFilter defines attribute names which emphasis elements can have.
|
||||
var EmphasisAttributeFilter = GlobalAttributeFilter
|
||||
|
||||
func (r *Renderer) renderEmphasis(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
n := node.(*ast.Emphasis)
|
||||
tag := "em"
|
||||
if n.Level == 2 {
|
||||
tag = "strong"
|
||||
}
|
||||
if entering {
|
||||
_ = w.WriteByte('<')
|
||||
_, _ = w.WriteString(tag)
|
||||
if n.Attributes() != nil {
|
||||
RenderAttributes(w, n, EmphasisAttributeFilter)
|
||||
}
|
||||
_ = w.WriteByte('>')
|
||||
} else {
|
||||
_, _ = w.WriteString("</")
|
||||
_, _ = w.WriteString(tag)
|
||||
_ = w.WriteByte('>')
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
|
||||
func (r *Renderer) renderLink(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
n := node.(*ast.Link)
|
||||
if entering {
|
||||
_, _ = w.WriteString("<a href=\"")
|
||||
if r.Unsafe || !IsDangerousURL(n.Destination) {
|
||||
_, _ = w.Write(util.EscapeHTML(util.URLEscape(n.Destination, true)))
|
||||
}
|
||||
_ = w.WriteByte('"')
|
||||
if n.Title != nil {
|
||||
_, _ = w.WriteString(` title="`)
|
||||
r.Writer.Write(w, n.Title)
|
||||
_ = w.WriteByte('"')
|
||||
}
|
||||
if n.Attributes() != nil {
|
||||
RenderAttributes(w, n, LinkAttributeFilter)
|
||||
}
|
||||
_ = w.WriteByte('>')
|
||||
} else {
|
||||
_, _ = w.WriteString("</a>")
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
|
||||
// ImageAttributeFilter defines attribute names which image elements can have.
|
||||
var ImageAttributeFilter = GlobalAttributeFilter.Extend(
|
||||
[]byte("align"),
|
||||
[]byte("border"),
|
||||
[]byte("crossorigin"),
|
||||
[]byte("decoding"),
|
||||
[]byte("height"),
|
||||
[]byte("importance"),
|
||||
[]byte("intrinsicsize"),
|
||||
[]byte("ismap"),
|
||||
[]byte("loading"),
|
||||
[]byte("referrerpolicy"),
|
||||
[]byte("sizes"),
|
||||
[]byte("srcset"),
|
||||
[]byte("usemap"),
|
||||
[]byte("width"),
|
||||
)
|
||||
|
||||
func (r *Renderer) renderImage(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if !entering {
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
n := node.(*ast.Image)
|
||||
_, _ = w.WriteString("<img src=\"")
|
||||
if r.Unsafe || !IsDangerousURL(n.Destination) {
|
||||
_, _ = w.Write(util.EscapeHTML(util.URLEscape(n.Destination, true)))
|
||||
}
|
||||
_, _ = w.WriteString(`" alt="`)
|
||||
_, _ = w.Write(util.EscapeHTML(n.Text(source)))
|
||||
_ = w.WriteByte('"')
|
||||
if n.Title != nil {
|
||||
_, _ = w.WriteString(` title="`)
|
||||
r.Writer.Write(w, n.Title)
|
||||
_ = w.WriteByte('"')
|
||||
}
|
||||
if n.Attributes() != nil {
|
||||
RenderAttributes(w, n, ImageAttributeFilter)
|
||||
}
|
||||
if r.XHTML {
|
||||
_, _ = w.WriteString(" />")
|
||||
} else {
|
||||
_, _ = w.WriteString(">")
|
||||
}
|
||||
return ast.WalkSkipChildren, nil
|
||||
}
|
||||
|
||||
func (r *Renderer) renderRawHTML(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if !entering {
|
||||
return ast.WalkSkipChildren, nil
|
||||
}
|
||||
if r.Unsafe {
|
||||
n := node.(*ast.RawHTML)
|
||||
l := n.Segments.Len()
|
||||
for i := 0; i < l; i++ {
|
||||
segment := n.Segments.At(i)
|
||||
_, _ = w.Write(segment.Value(source))
|
||||
}
|
||||
return ast.WalkSkipChildren, nil
|
||||
}
|
||||
_, _ = w.WriteString("<!-- raw HTML omitted -->")
|
||||
return ast.WalkSkipChildren, nil
|
||||
}
|
||||
|
||||
func (r *Renderer) renderText(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if !entering {
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
n := node.(*ast.Text)
|
||||
segment := n.Segment
|
||||
if n.IsRaw() {
|
||||
r.Writer.RawWrite(w, segment.Value(source))
|
||||
} else {
|
||||
r.Writer.Write(w, segment.Value(source))
|
||||
if n.HardLineBreak() || (n.SoftLineBreak() && r.HardWraps) {
|
||||
if r.XHTML {
|
||||
_, _ = w.WriteString("<br />\n")
|
||||
} else {
|
||||
_, _ = w.WriteString("<br>\n")
|
||||
}
|
||||
} else if n.SoftLineBreak() {
|
||||
_ = w.WriteByte('\n')
|
||||
}
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
|
||||
func (r *Renderer) renderString(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if !entering {
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
n := node.(*ast.String)
|
||||
if n.IsCode() {
|
||||
_, _ = w.Write(n.Value)
|
||||
} else {
|
||||
if n.IsRaw() {
|
||||
r.Writer.RawWrite(w, n.Value)
|
||||
} else {
|
||||
r.Writer.Write(w, n.Value)
|
||||
}
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
|
||||
var dataPrefix = []byte("data-")
|
||||
|
||||
// RenderAttributes renders given node's attributes.
|
||||
// You can specify attribute names to render by the filter.
|
||||
// If filter is nil, RenderAttributes renders all attributes.
|
||||
func RenderAttributes(w util.BufWriter, node ast.Node, filter util.BytesFilter) {
|
||||
for _, attr := range node.Attributes() {
|
||||
if filter != nil && !filter.Contains(attr.Name) {
|
||||
if !bytes.HasPrefix(attr.Name, dataPrefix) {
|
||||
continue
|
||||
}
|
||||
}
|
||||
_, _ = w.WriteString(" ")
|
||||
_, _ = w.Write(attr.Name)
|
||||
_, _ = w.WriteString(`="`)
|
||||
// TODO: convert numeric values to strings
|
||||
_, _ = w.Write(util.EscapeHTML(attr.Value.([]byte)))
|
||||
_ = w.WriteByte('"')
|
||||
}
|
||||
}
|
||||
|
||||
// A Writer interface writes textual contents to a writer.
|
||||
type Writer interface {
|
||||
// Write writes the given source to writer with resolving references and unescaping
|
||||
// backslash escaped characters.
|
||||
Write(writer util.BufWriter, source []byte)
|
||||
|
||||
// RawWrite writes the given source to writer without resolving references and
|
||||
// unescaping backslash escaped characters.
|
||||
RawWrite(writer util.BufWriter, source []byte)
|
||||
|
||||
// SecureWrite writes the given source to writer with replacing insecure characters.
|
||||
SecureWrite(writer util.BufWriter, source []byte)
|
||||
}
|
||||
|
||||
var replacementCharacter = []byte("\ufffd")
|
||||
|
||||
type defaultWriter struct {
|
||||
}
|
||||
|
||||
func escapeRune(writer util.BufWriter, r rune) {
|
||||
if r < 256 {
|
||||
v := util.EscapeHTMLByte(byte(r))
|
||||
if v != nil {
|
||||
_, _ = writer.Write(v)
|
||||
return
|
||||
}
|
||||
}
|
||||
_, _ = writer.WriteRune(util.ToValidRune(r))
|
||||
}
|
||||
|
||||
func (d *defaultWriter) SecureWrite(writer util.BufWriter, source []byte) {
|
||||
n := 0
|
||||
l := len(source)
|
||||
for i := 0; i < l; i++ {
|
||||
if source[i] == '\u0000' {
|
||||
_, _ = writer.Write(source[i-n : i])
|
||||
n = 0
|
||||
_, _ = writer.Write(replacementCharacter)
|
||||
continue
|
||||
}
|
||||
n++
|
||||
}
|
||||
if n != 0 {
|
||||
_, _ = writer.Write(source[l-n:])
|
||||
}
|
||||
}
|
||||
|
||||
func (d *defaultWriter) RawWrite(writer util.BufWriter, source []byte) {
|
||||
n := 0
|
||||
l := len(source)
|
||||
for i := 0; i < l; i++ {
|
||||
v := util.EscapeHTMLByte(source[i])
|
||||
if v != nil {
|
||||
_, _ = writer.Write(source[i-n : i])
|
||||
n = 0
|
||||
_, _ = writer.Write(v)
|
||||
continue
|
||||
}
|
||||
n++
|
||||
}
|
||||
if n != 0 {
|
||||
_, _ = writer.Write(source[l-n:])
|
||||
}
|
||||
}
|
||||
|
||||
func (d *defaultWriter) Write(writer util.BufWriter, source []byte) {
|
||||
escaped := false
|
||||
var ok bool
|
||||
limit := len(source)
|
||||
n := 0
|
||||
for i := 0; i < limit; i++ {
|
||||
c := source[i]
|
||||
if escaped {
|
||||
if util.IsPunct(c) {
|
||||
d.RawWrite(writer, source[n:i-1])
|
||||
n = i
|
||||
escaped = false
|
||||
continue
|
||||
}
|
||||
}
|
||||
if c == '\x00' {
|
||||
d.RawWrite(writer, source[n:i])
|
||||
d.RawWrite(writer, replacementCharacter)
|
||||
n = i + 1
|
||||
escaped = false
|
||||
continue
|
||||
}
|
||||
if c == '&' {
|
||||
pos := i
|
||||
next := i + 1
|
||||
if next < limit && source[next] == '#' {
|
||||
nnext := next + 1
|
||||
if nnext < limit {
|
||||
nc := source[nnext]
|
||||
// code point like #x22;
|
||||
if nnext < limit && nc == 'x' || nc == 'X' {
|
||||
start := nnext + 1
|
||||
i, ok = util.ReadWhile(source, [2]int{start, limit}, util.IsHexDecimal)
|
||||
if ok && i < limit && source[i] == ';' && i-start < 7 {
|
||||
v, _ := strconv.ParseUint(util.BytesToReadOnlyString(source[start:i]), 16, 32)
|
||||
d.RawWrite(writer, source[n:pos])
|
||||
n = i + 1
|
||||
escapeRune(writer, rune(v))
|
||||
continue
|
||||
}
|
||||
// code point like #1234;
|
||||
} else if nc >= '0' && nc <= '9' {
|
||||
start := nnext
|
||||
i, ok = util.ReadWhile(source, [2]int{start, limit}, util.IsNumeric)
|
||||
if ok && i < limit && i-start < 8 && source[i] == ';' {
|
||||
v, _ := strconv.ParseUint(util.BytesToReadOnlyString(source[start:i]), 10, 32)
|
||||
d.RawWrite(writer, source[n:pos])
|
||||
n = i + 1
|
||||
escapeRune(writer, rune(v))
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
start := next
|
||||
i, ok = util.ReadWhile(source, [2]int{start, limit}, util.IsAlphaNumeric)
|
||||
// entity reference
|
||||
if ok && i < limit && source[i] == ';' {
|
||||
name := util.BytesToReadOnlyString(source[start:i])
|
||||
entity, ok := util.LookUpHTML5EntityByName(name)
|
||||
if ok {
|
||||
d.RawWrite(writer, source[n:pos])
|
||||
n = i + 1
|
||||
d.RawWrite(writer, entity.Characters)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
i = next - 1
|
||||
}
|
||||
if c == '\\' {
|
||||
escaped = true
|
||||
continue
|
||||
}
|
||||
escaped = false
|
||||
}
|
||||
d.RawWrite(writer, source[n:])
|
||||
}
|
||||
|
||||
// DefaultWriter is a default implementation of the Writer.
|
||||
var DefaultWriter = &defaultWriter{}
|
||||
|
||||
var bDataImage = []byte("data:image/")
|
||||
var bPng = []byte("png;")
|
||||
var bGif = []byte("gif;")
|
||||
var bJpeg = []byte("jpeg;")
|
||||
var bWebp = []byte("webp;")
|
||||
var bSvg = []byte("svg;")
|
||||
var bJs = []byte("javascript:")
|
||||
var bVb = []byte("vbscript:")
|
||||
var bFile = []byte("file:")
|
||||
var bData = []byte("data:")
|
||||
|
||||
// IsDangerousURL returns true if the given url seems a potentially dangerous url,
|
||||
// otherwise false.
|
||||
func IsDangerousURL(url []byte) bool {
|
||||
if bytes.HasPrefix(url, bDataImage) && len(url) >= 11 {
|
||||
v := url[11:]
|
||||
if bytes.HasPrefix(v, bPng) || bytes.HasPrefix(v, bGif) ||
|
||||
bytes.HasPrefix(v, bJpeg) || bytes.HasPrefix(v, bWebp) ||
|
||||
bytes.HasPrefix(v, bSvg) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
return bytes.HasPrefix(url, bJs) || bytes.HasPrefix(url, bVb) ||
|
||||
bytes.HasPrefix(url, bFile) || bytes.HasPrefix(url, bData)
|
||||
}
|
|
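Illustrative aside (not part of the diff): the HTML renderer's Config flags above are normally set through its functional options when constructing a goldmark instance. A minimal sketch, assuming the usual goldmark entry points:

package main

import (
    "bytes"
    "log"

    "github.com/yuin/goldmark"
    "github.com/yuin/goldmark/renderer/html"
)

func main() {
    md := goldmark.New(
        goldmark.WithRendererOptions(
            html.WithHardWraps(), // soft line breaks become <br>
            html.WithXHTML(),     // self-closing tags: <br />, <hr />, <img />
            html.WithUnsafe(),    // pass raw HTML and "dangerous" URLs through unchanged
        ),
    )
    var buf bytes.Buffer
    if err := md.Convert([]byte("line one\nline two\n"), &buf); err != nil {
        log.Fatal(err)
    }
    log.Print(buf.String())
}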
@@ -0,0 +1,174 @@
// Package renderer renders the given AST to certain formats.
package renderer

import (
    "bufio"
    "io"
    "sync"

    "github.com/yuin/goldmark/ast"
    "github.com/yuin/goldmark/util"
)

// A Config struct is a data structure that holds configuration of the Renderer.
type Config struct {
    Options       map[OptionName]interface{}
    NodeRenderers util.PrioritizedSlice
}

// NewConfig returns a new Config
func NewConfig() *Config {
    return &Config{
        Options:       map[OptionName]interface{}{},
        NodeRenderers: util.PrioritizedSlice{},
    }
}

// An OptionName is a name of the option.
type OptionName string

// An Option interface is a functional option type for the Renderer.
type Option interface {
    SetConfig(*Config)
}

type withNodeRenderers struct {
    value []util.PrioritizedValue
}

func (o *withNodeRenderers) SetConfig(c *Config) {
    c.NodeRenderers = append(c.NodeRenderers, o.value...)
}

// WithNodeRenderers is a functional option that allow you to add
// NodeRenderers to the renderer.
func WithNodeRenderers(ps ...util.PrioritizedValue) Option {
    return &withNodeRenderers{ps}
}

type withOption struct {
    name  OptionName
    value interface{}
}

func (o *withOption) SetConfig(c *Config) {
    c.Options[o.name] = o.value
}

// WithOption is a functional option that allow you to set
// an arbitrary option to the parser.
func WithOption(name OptionName, value interface{}) Option {
    return &withOption{name, value}
}

// A SetOptioner interface sets given option to the object.
type SetOptioner interface {
    // SetOption sets given option to the object.
    // Unacceptable options may be passed.
    // Thus implementations must ignore unacceptable options.
    SetOption(name OptionName, value interface{})
}

// NodeRendererFunc is a function that renders a given node.
type NodeRendererFunc func(writer util.BufWriter, source []byte, n ast.Node, entering bool) (ast.WalkStatus, error)

// A NodeRenderer interface offers NodeRendererFuncs.
type NodeRenderer interface {
    // RendererFuncs registers NodeRendererFuncs to given NodeRendererFuncRegisterer.
    RegisterFuncs(NodeRendererFuncRegisterer)
}

// A NodeRendererFuncRegisterer registers
type NodeRendererFuncRegisterer interface {
    // Register registers given NodeRendererFunc to this object.
    Register(ast.NodeKind, NodeRendererFunc)
}

// A Renderer interface renders given AST node to given
// writer with given Renderer.
type Renderer interface {
    Render(w io.Writer, source []byte, n ast.Node) error

    // AddOptions adds given option to this renderer.
    AddOptions(...Option)
}

type renderer struct {
    config               *Config
    options              map[OptionName]interface{}
    nodeRendererFuncsTmp map[ast.NodeKind]NodeRendererFunc
    maxKind              int
    nodeRendererFuncs    []NodeRendererFunc
    initSync             sync.Once
}

// NewRenderer returns a new Renderer with given options.
func NewRenderer(options ...Option) Renderer {
    config := NewConfig()
    for _, opt := range options {
        opt.SetConfig(config)
    }

    r := &renderer{
        options:              map[OptionName]interface{}{},
        config:               config,
        nodeRendererFuncsTmp: map[ast.NodeKind]NodeRendererFunc{},
    }

    return r
}

func (r *renderer) AddOptions(opts ...Option) {
    for _, opt := range opts {
        opt.SetConfig(r.config)
    }
}

func (r *renderer) Register(kind ast.NodeKind, v NodeRendererFunc) {
    r.nodeRendererFuncsTmp[kind] = v
    if int(kind) > r.maxKind {
        r.maxKind = int(kind)
    }
}

// Render renders the given AST node to the given writer with the given Renderer.
func (r *renderer) Render(w io.Writer, source []byte, n ast.Node) error {
    r.initSync.Do(func() {
        r.options = r.config.Options
        r.config.NodeRenderers.Sort()
        l := len(r.config.NodeRenderers)
        for i := l - 1; i >= 0; i-- {
            v := r.config.NodeRenderers[i]
            nr, _ := v.Value.(NodeRenderer)
            if se, ok := v.Value.(SetOptioner); ok {
                for oname, ovalue := range r.options {
                    se.SetOption(oname, ovalue)
                }
            }
            nr.RegisterFuncs(r)
        }
        r.nodeRendererFuncs = make([]NodeRendererFunc, r.maxKind+1)
        for kind, nr := range r.nodeRendererFuncsTmp {
            r.nodeRendererFuncs[kind] = nr
        }
        r.config = nil
        r.nodeRendererFuncsTmp = nil
    })
    writer, ok := w.(util.BufWriter)
    if !ok {
        writer = bufio.NewWriter(w)
    }
    err := ast.Walk(n, func(n ast.Node, entering bool) (ast.WalkStatus, error) {
        s := ast.WalkStatus(ast.WalkContinue)
        var err error
        f := r.nodeRendererFuncs[n.Kind()]
        if f != nil {
            s, err = f(writer, source, n, entering)
        }
        return s, err
    })
    if err != nil {
        return err
    }
    return writer.Flush()
}
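Illustrative aside (not part of the diff): a Renderer is assembled from prioritized NodeRenderers and then walks the AST. A minimal sketch of wiring the HTML NodeRenderer into a standalone renderer; the priority value 1000 is only an example:

package main

import (
    "os"

    "github.com/yuin/goldmark"
    "github.com/yuin/goldmark/renderer"
    "github.com/yuin/goldmark/renderer/html"
    "github.com/yuin/goldmark/text"
    "github.com/yuin/goldmark/util"
)

func main() {
    source := []byte("# Hello\n\nSome *Markdown*.\n")

    // Parse with goldmark's default parser, then render with an explicitly built renderer.
    doc := goldmark.New().Parser().Parse(text.NewReader(source))

    r := renderer.NewRenderer(
        renderer.WithNodeRenderers(util.Prioritized(html.NewRenderer(), 1000)),
    )
    if err := r.Render(os.Stdout, source, doc); err != nil {
        panic(err)
    }
}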
@@ -0,0 +1,653 @@
|
|||
package text
|
||||
|
||||
import (
|
||||
"io"
|
||||
"regexp"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
const invalidValue = -1
|
||||
|
||||
// EOF indicates the end of file.
|
||||
const EOF = byte(0xff)
|
||||
|
||||
// A Reader interface provides abstracted method for reading text.
|
||||
type Reader interface {
|
||||
io.RuneReader
|
||||
|
||||
// Source returns a source of the reader.
|
||||
Source() []byte
|
||||
|
||||
// ResetPosition resets positions.
|
||||
ResetPosition()
|
||||
|
||||
// Peek returns a byte at current position without advancing the internal pointer.
|
||||
Peek() byte
|
||||
|
||||
// PeekLine returns the current line without advancing the internal pointer.
|
||||
PeekLine() ([]byte, Segment)
|
||||
|
||||
// PrecendingCharacter returns a character just before current internal pointer.
|
||||
PrecendingCharacter() rune
|
||||
|
||||
// Value returns a value of the given segment.
|
||||
Value(Segment) []byte
|
||||
|
||||
// LineOffset returns a distance from the line head to current position.
|
||||
LineOffset() int
|
||||
|
||||
// Position returns current line number and position.
|
||||
Position() (int, Segment)
|
||||
|
||||
// SetPosition sets current line number and position.
|
||||
SetPosition(int, Segment)
|
||||
|
||||
// SetPadding sets padding to the reader.
|
||||
SetPadding(int)
|
||||
|
||||
// Advance advances the internal pointer.
|
||||
Advance(int)
|
||||
|
||||
// AdvanceAndSetPadding advances the internal pointer and add padding to the
|
||||
// reader.
|
||||
AdvanceAndSetPadding(int, int)
|
||||
|
||||
// AdvanceLine advances the internal pointer to the next line head.
|
||||
AdvanceLine()
|
||||
|
||||
// SkipSpaces skips space characters and returns a non-blank line.
|
||||
// If it reaches EOF, returns false.
|
||||
SkipSpaces() (Segment, int, bool)
|
||||
|
||||
// SkipSpaces skips blank lines and returns a non-blank line.
|
||||
// If it reaches EOF, returns false.
|
||||
SkipBlankLines() (Segment, int, bool)
|
||||
|
||||
// Match performs regular expression matching to current line.
|
||||
Match(reg *regexp.Regexp) bool
|
||||
|
||||
// Match performs regular expression searching to current line.
|
||||
FindSubMatch(reg *regexp.Regexp) [][]byte
|
||||
|
||||
// FindClosure finds corresponding closure.
|
||||
FindClosure(opener, closer byte, options FindClosureOptions) (*Segments, bool)
|
||||
}
|
||||
|
||||
// FindClosureOptions is options for Reader.FindClosure
|
||||
type FindClosureOptions struct {
|
||||
// CodeSpan is a flag for the FindClosure. If this is set to true,
|
||||
// FindClosure ignores closers in codespans.
|
||||
CodeSpan bool
|
||||
|
||||
// Nesting is a flag for the FindClosure. If this is set to true,
|
||||
// FindClosure allows nesting.
|
||||
Nesting bool
|
||||
|
||||
// Newline is a flag for the FindClosure. If this is set to true,
|
||||
// FindClosure searches for a closer over multiple lines.
|
||||
Newline bool
|
||||
|
||||
// Advance is a flag for the FindClosure. If this is set to true,
|
||||
// FindClosure advances pointers when closer is found.
|
||||
Advance bool
|
||||
}
|
||||
|
||||
type reader struct {
|
||||
source []byte
|
||||
sourceLength int
|
||||
line int
|
||||
peekedLine []byte
|
||||
pos Segment
|
||||
head int
|
||||
lineOffset int
|
||||
}
|
||||
|
||||
// NewReader return a new Reader that can read UTF-8 bytes .
|
||||
func NewReader(source []byte) Reader {
|
||||
r := &reader{
|
||||
source: source,
|
||||
sourceLength: len(source),
|
||||
}
|
||||
r.ResetPosition()
|
||||
return r
|
||||
}
|
||||
|
||||
func (r *reader) FindClosure(opener, closer byte, options FindClosureOptions) (*Segments, bool) {
|
||||
return findClosureReader(r, opener, closer, options)
|
||||
}
|
||||
|
||||
func (r *reader) ResetPosition() {
|
||||
r.line = -1
|
||||
r.head = 0
|
||||
r.lineOffset = -1
|
||||
r.AdvanceLine()
|
||||
}
|
||||
|
||||
func (r *reader) Source() []byte {
|
||||
return r.source
|
||||
}
|
||||
|
||||
func (r *reader) Value(seg Segment) []byte {
|
||||
return seg.Value(r.source)
|
||||
}
|
||||
|
||||
func (r *reader) Peek() byte {
|
||||
if r.pos.Start >= 0 && r.pos.Start < r.sourceLength {
|
||||
if r.pos.Padding != 0 {
|
||||
return space[0]
|
||||
}
|
||||
return r.source[r.pos.Start]
|
||||
}
|
||||
return EOF
|
||||
}
|
||||
|
||||
func (r *reader) PeekLine() ([]byte, Segment) {
|
||||
if r.pos.Start >= 0 && r.pos.Start < r.sourceLength {
|
||||
if r.peekedLine == nil {
|
||||
r.peekedLine = r.pos.Value(r.Source())
|
||||
}
|
||||
return r.peekedLine, r.pos
|
||||
}
|
||||
return nil, r.pos
|
||||
}
|
||||
|
||||
// io.RuneReader interface
|
||||
func (r *reader) ReadRune() (rune, int, error) {
|
||||
return readRuneReader(r)
|
||||
}
|
||||
|
||||
func (r *reader) LineOffset() int {
|
||||
if r.lineOffset < 0 {
|
||||
v := 0
|
||||
for i := r.head; i < r.pos.Start; i++ {
|
||||
if r.source[i] == '\t' {
|
||||
v += util.TabWidth(v)
|
||||
} else {
|
||||
v++
|
||||
}
|
||||
}
|
||||
r.lineOffset = v - r.pos.Padding
|
||||
}
|
||||
return r.lineOffset
|
||||
}
|
||||
|
||||
func (r *reader) PrecendingCharacter() rune {
|
||||
if r.pos.Start <= 0 {
|
||||
if r.pos.Padding != 0 {
|
||||
return rune(' ')
|
||||
}
|
||||
return rune('\n')
|
||||
}
|
||||
i := r.pos.Start - 1
|
||||
for ; i >= 0; i-- {
|
||||
if utf8.RuneStart(r.source[i]) {
|
||||
break
|
||||
}
|
||||
}
|
||||
rn, _ := utf8.DecodeRune(r.source[i:])
|
||||
return rn
|
||||
}
|
||||
|
||||
func (r *reader) Advance(n int) {
|
||||
r.lineOffset = -1
|
||||
if n < len(r.peekedLine) && r.pos.Padding == 0 {
|
||||
r.pos.Start += n
|
||||
r.peekedLine = nil
|
||||
return
|
||||
}
|
||||
r.peekedLine = nil
|
||||
l := r.sourceLength
|
||||
for ; n > 0 && r.pos.Start < l; n-- {
|
||||
if r.pos.Padding != 0 {
|
||||
r.pos.Padding--
|
||||
continue
|
||||
}
|
||||
if r.source[r.pos.Start] == '\n' {
|
||||
r.AdvanceLine()
|
||||
continue
|
||||
}
|
||||
r.pos.Start++
|
||||
}
|
||||
}
|
||||
|
||||
func (r *reader) AdvanceAndSetPadding(n, padding int) {
|
||||
r.Advance(n)
|
||||
if padding > r.pos.Padding {
|
||||
r.SetPadding(padding)
|
||||
}
|
||||
}
|
||||
|
||||
func (r *reader) AdvanceLine() {
|
||||
r.lineOffset = -1
|
||||
r.peekedLine = nil
|
||||
r.pos.Start = r.pos.Stop
|
||||
r.head = r.pos.Start
|
||||
if r.pos.Start < 0 {
|
||||
return
|
||||
}
|
||||
r.pos.Stop = r.sourceLength
|
||||
for i := r.pos.Start; i < r.sourceLength; i++ {
|
||||
c := r.source[i]
|
||||
if c == '\n' {
|
||||
r.pos.Stop = i + 1
|
||||
break
|
||||
}
|
||||
}
|
||||
r.line++
|
||||
r.pos.Padding = 0
|
||||
}
|
||||
|
||||
func (r *reader) Position() (int, Segment) {
|
||||
return r.line, r.pos
|
||||
}
|
||||
|
||||
func (r *reader) SetPosition(line int, pos Segment) {
|
||||
r.lineOffset = -1
|
||||
r.line = line
|
||||
r.pos = pos
|
||||
}
|
||||
|
||||
func (r *reader) SetPadding(v int) {
|
||||
r.pos.Padding = v
|
||||
}
|
||||
|
||||
func (r *reader) SkipSpaces() (Segment, int, bool) {
|
||||
return skipSpacesReader(r)
|
||||
}
|
||||
|
||||
func (r *reader) SkipBlankLines() (Segment, int, bool) {
|
||||
return skipBlankLinesReader(r)
|
||||
}
|
||||
|
||||
func (r *reader) Match(reg *regexp.Regexp) bool {
|
||||
return matchReader(r, reg)
|
||||
}
|
||||
|
||||
func (r *reader) FindSubMatch(reg *regexp.Regexp) [][]byte {
|
||||
return findSubMatchReader(r, reg)
|
||||
}
|
||||
|
||||
// A BlockReader interface is a reader that is optimized for Blocks.
|
||||
type BlockReader interface {
|
||||
Reader
|
||||
// Reset resets current state and sets new segments to the reader.
|
||||
Reset(segment *Segments)
|
||||
}
|
||||
|
||||
type blockReader struct {
|
||||
source []byte
|
||||
segments *Segments
|
||||
segmentsLength int
|
||||
line int
|
||||
pos Segment
|
||||
head int
|
||||
last int
|
||||
lineOffset int
|
||||
}
|
||||
|
||||
// NewBlockReader returns a new BlockReader.
|
||||
func NewBlockReader(source []byte, segments *Segments) BlockReader {
|
||||
r := &blockReader{
|
||||
source: source,
|
||||
}
|
||||
if segments != nil {
|
||||
r.Reset(segments)
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
func (r *blockReader) FindClosure(opener, closer byte, options FindClosureOptions) (*Segments, bool) {
|
||||
return findClosureReader(r, opener, closer, options)
|
||||
}
|
||||
|
||||
func (r *blockReader) ResetPosition() {
|
||||
r.line = -1
|
||||
r.head = 0
|
||||
r.last = 0
|
||||
r.lineOffset = -1
|
||||
r.pos.Start = -1
|
||||
r.pos.Stop = -1
|
||||
r.pos.Padding = 0
|
||||
if r.segmentsLength > 0 {
|
||||
last := r.segments.At(r.segmentsLength - 1)
|
||||
r.last = last.Stop
|
||||
}
|
||||
r.AdvanceLine()
|
||||
}
|
||||
|
||||
func (r *blockReader) Reset(segments *Segments) {
|
||||
r.segments = segments
|
||||
r.segmentsLength = segments.Len()
|
||||
r.ResetPosition()
|
||||
}
|
||||
|
||||
func (r *blockReader) Source() []byte {
|
||||
return r.source
|
||||
}
|
||||
|
||||
func (r *blockReader) Value(seg Segment) []byte {
|
||||
line := r.segmentsLength - 1
|
||||
ret := make([]byte, 0, seg.Stop-seg.Start+1)
|
||||
for ; line >= 0; line-- {
|
||||
if seg.Start >= r.segments.At(line).Start {
|
||||
break
|
||||
}
|
||||
}
|
||||
i := seg.Start
|
||||
for ; line < r.segmentsLength; line++ {
|
||||
s := r.segments.At(line)
|
||||
if i < 0 {
|
||||
i = s.Start
|
||||
}
|
||||
ret = s.ConcatPadding(ret)
|
||||
for ; i < seg.Stop && i < s.Stop; i++ {
|
||||
ret = append(ret, r.source[i])
|
||||
}
|
||||
i = -1
|
||||
if s.Stop > seg.Stop {
|
||||
break
|
||||
}
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
// io.RuneReader interface
|
||||
func (r *blockReader) ReadRune() (rune, int, error) {
|
||||
return readRuneReader(r)
|
||||
}
|
||||
|
||||
func (r *blockReader) PrecendingCharacter() rune {
|
||||
if r.pos.Padding != 0 {
|
||||
return rune(' ')
|
||||
}
|
||||
if r.segments.Len() < 1 {
|
||||
return rune('\n')
|
||||
}
|
||||
firstSegment := r.segments.At(0)
|
||||
if r.line == 0 && r.pos.Start <= firstSegment.Start {
|
||||
return rune('\n')
|
||||
}
|
||||
l := len(r.source)
|
||||
i := r.pos.Start - 1
|
||||
for ; i < l && i >= 0; i-- {
|
||||
if utf8.RuneStart(r.source[i]) {
|
||||
break
|
||||
}
|
||||
}
|
||||
if i < 0 || i >= l {
|
||||
return rune('\n')
|
||||
}
|
||||
rn, _ := utf8.DecodeRune(r.source[i:])
|
||||
return rn
|
||||
}
|
||||
|
||||
func (r *blockReader) LineOffset() int {
|
||||
if r.lineOffset < 0 {
|
||||
v := 0
|
||||
for i := r.head; i < r.pos.Start; i++ {
|
||||
if r.source[i] == '\t' {
|
||||
v += util.TabWidth(v)
|
||||
} else {
|
||||
v++
|
||||
}
|
||||
}
|
||||
r.lineOffset = v - r.pos.Padding
|
||||
}
|
||||
return r.lineOffset
|
||||
}
|
||||
|
||||
func (r *blockReader) Peek() byte {
|
||||
if r.line < r.segmentsLength && r.pos.Start >= 0 && r.pos.Start < r.last {
|
||||
if r.pos.Padding != 0 {
|
||||
return space[0]
|
||||
}
|
||||
return r.source[r.pos.Start]
|
||||
}
|
||||
return EOF
|
||||
}
|
||||
|
||||
func (r *blockReader) PeekLine() ([]byte, Segment) {
|
||||
if r.line < r.segmentsLength && r.pos.Start >= 0 && r.pos.Start < r.last {
|
||||
return r.pos.Value(r.source), r.pos
|
||||
}
|
||||
return nil, r.pos
|
||||
}
|
||||
|
||||
func (r *blockReader) Advance(n int) {
|
||||
r.lineOffset = -1
|
||||
|
||||
if n < r.pos.Stop-r.pos.Start && r.pos.Padding == 0 {
|
||||
r.pos.Start += n
|
||||
return
|
||||
}
|
||||
|
||||
for ; n > 0; n-- {
|
||||
if r.pos.Padding != 0 {
|
||||
r.pos.Padding--
|
||||
continue
|
||||
}
|
||||
if r.pos.Start >= r.pos.Stop-1 && r.pos.Stop < r.last {
|
||||
r.AdvanceLine()
|
||||
continue
|
||||
}
|
||||
r.pos.Start++
|
||||
}
|
||||
}
|
||||
|
||||
func (r *blockReader) AdvanceAndSetPadding(n, padding int) {
|
||||
r.Advance(n)
|
||||
if padding > r.pos.Padding {
|
||||
r.SetPadding(padding)
|
||||
}
|
||||
}
|
||||
|
||||
func (r *blockReader) AdvanceLine() {
|
||||
r.SetPosition(r.line+1, NewSegment(invalidValue, invalidValue))
|
||||
r.head = r.pos.Start
|
||||
}
|
||||
|
||||
func (r *blockReader) Position() (int, Segment) {
|
||||
return r.line, r.pos
|
||||
}
|
||||
|
||||
func (r *blockReader) SetPosition(line int, pos Segment) {
|
||||
r.lineOffset = -1
|
||||
r.line = line
|
||||
if pos.Start == invalidValue {
|
||||
if r.line < r.segmentsLength {
|
||||
s := r.segments.At(line)
|
||||
r.head = s.Start
|
||||
r.pos = s
|
||||
}
|
||||
} else {
|
||||
r.pos = pos
|
||||
if r.line < r.segmentsLength {
|
||||
s := r.segments.At(line)
|
||||
r.head = s.Start
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (r *blockReader) SetPadding(v int) {
|
||||
r.lineOffset = -1
|
||||
r.pos.Padding = v
|
||||
}
|
||||
|
||||
func (r *blockReader) SkipSpaces() (Segment, int, bool) {
|
||||
return skipSpacesReader(r)
|
||||
}
|
||||
|
||||
func (r *blockReader) SkipBlankLines() (Segment, int, bool) {
|
||||
return skipBlankLinesReader(r)
|
||||
}
|
||||
|
||||
func (r *blockReader) Match(reg *regexp.Regexp) bool {
|
||||
return matchReader(r, reg)
|
||||
}
|
||||
|
||||
func (r *blockReader) FindSubMatch(reg *regexp.Regexp) [][]byte {
|
||||
return findSubMatchReader(r, reg)
|
||||
}
|
||||
|
||||
func skipBlankLinesReader(r Reader) (Segment, int, bool) {
|
||||
lines := 0
|
||||
for {
|
||||
line, seg := r.PeekLine()
|
||||
if line == nil {
|
||||
return seg, lines, false
|
||||
}
|
||||
if util.IsBlank(line) {
|
||||
lines++
|
||||
r.AdvanceLine()
|
||||
} else {
|
||||
return seg, lines, true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func skipSpacesReader(r Reader) (Segment, int, bool) {
|
||||
chars := 0
|
||||
for {
|
||||
line, segment := r.PeekLine()
|
||||
if line == nil {
|
||||
return segment, chars, false
|
||||
}
|
||||
for i, c := range line {
|
||||
if util.IsSpace(c) {
|
||||
chars++
|
||||
r.Advance(1)
|
||||
continue
|
||||
}
|
||||
return segment.WithStart(segment.Start + i + 1), chars, true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func matchReader(r Reader, reg *regexp.Regexp) bool {
|
||||
oldline, oldseg := r.Position()
|
||||
match := reg.FindReaderSubmatchIndex(r)
|
||||
r.SetPosition(oldline, oldseg)
|
||||
if match == nil {
|
||||
return false
|
||||
}
|
||||
r.Advance(match[1] - match[0])
|
||||
return true
|
||||
}
|
||||
|
||||
func findSubMatchReader(r Reader, reg *regexp.Regexp) [][]byte {
|
||||
oldline, oldseg := r.Position()
|
||||
match := reg.FindReaderSubmatchIndex(r)
|
||||
r.SetPosition(oldline, oldseg)
|
||||
if match == nil {
|
||||
return nil
|
||||
}
|
||||
runes := make([]rune, 0, match[1]-match[0])
|
||||
for i := 0; i < match[1]; {
|
||||
r, size, _ := readRuneReader(r)
|
||||
i += size
|
||||
runes = append(runes, r)
|
||||
}
|
||||
result := [][]byte{}
|
||||
for i := 0; i < len(match); i += 2 {
|
||||
result = append(result, []byte(string(runes[match[i]:match[i+1]])))
|
||||
}
|
||||
|
||||
r.SetPosition(oldline, oldseg)
|
||||
r.Advance(match[1] - match[0])
|
||||
return result
|
||||
}
|
||||
|
||||
func readRuneReader(r Reader) (rune, int, error) {
|
||||
line, _ := r.PeekLine()
|
||||
if line == nil {
|
||||
return 0, 0, io.EOF
|
||||
}
|
||||
rn, size := utf8.DecodeRune(line)
|
||||
if rn == utf8.RuneError {
|
||||
return 0, 0, io.EOF
|
||||
}
|
||||
r.Advance(size)
|
||||
return rn, size, nil
|
||||
}
|
||||
|
||||
func findClosureReader(r Reader, opener, closer byte, opts FindClosureOptions) (*Segments, bool) {
|
||||
opened := 1
|
||||
codeSpanOpener := 0
|
||||
closed := false
|
||||
orgline, orgpos := r.Position()
|
||||
var ret *Segments
|
||||
|
||||
for {
|
||||
bs, seg := r.PeekLine()
|
||||
if bs == nil {
|
||||
goto end
|
||||
}
|
||||
i := 0
|
||||
for i < len(bs) {
|
||||
c := bs[i]
|
||||
if opts.CodeSpan && codeSpanOpener != 0 && c == '`' {
|
||||
codeSpanCloser := 0
|
||||
for ; i < len(bs); i++ {
|
||||
if bs[i] == '`' {
|
||||
codeSpanCloser++
|
||||
} else {
|
||||
i--
|
||||
break
|
||||
}
|
||||
}
|
||||
if codeSpanCloser == codeSpanOpener {
|
||||
codeSpanOpener = 0
|
||||
}
|
||||
} else if codeSpanOpener == 0 && c == '\\' && i < len(bs)-1 && util.IsPunct(bs[i+1]) {
|
||||
i += 2
|
||||
continue
|
||||
} else if opts.CodeSpan && codeSpanOpener == 0 && c == '`' {
|
||||
for ; i < len(bs); i++ {
|
||||
if bs[i] == '`' {
|
||||
codeSpanOpener++
|
||||
} else {
|
||||
i--
|
||||
break
|
||||
}
|
||||
}
|
||||
} else if (opts.CodeSpan && codeSpanOpener == 0) || !opts.CodeSpan {
|
||||
if c == closer {
|
||||
opened--
|
||||
if opened == 0 {
|
||||
if ret == nil {
|
||||
ret = NewSegments()
|
||||
}
|
||||
ret.Append(seg.WithStop(seg.Start + i))
|
||||
r.Advance(i + 1)
|
||||
closed = true
|
||||
goto end
|
||||
}
|
||||
} else if c == opener {
|
||||
if !opts.Nesting {
|
||||
goto end
|
||||
}
|
||||
opened++
|
||||
}
|
||||
}
|
||||
i++
|
||||
}
|
||||
if !opts.Newline {
|
||||
goto end
|
||||
}
|
||||
r.AdvanceLine()
|
||||
if ret == nil {
|
||||
ret = NewSegments()
|
||||
}
|
||||
ret.Append(seg)
|
||||
}
|
||||
end:
|
||||
if !opts.Advance {
|
||||
r.SetPosition(orgline, orgpos)
|
||||
}
|
||||
if closed {
|
||||
return ret, true
|
||||
}
|
||||
return nil, false
|
||||
}
|
|
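Illustrative aside (not part of the diff): a text.Reader walks the source line by line; PeekLine returns nil once the input is exhausted, and AdvanceLine moves to the next line head. A minimal sketch based only on the interface shown above:

package main

import (
    "fmt"

    "github.com/yuin/goldmark/text"
)

func main() {
    r := text.NewReader([]byte("first line\nsecond line\n"))
    for {
        line, seg := r.PeekLine()
        if line == nil {
            break // end of input
        }
        fmt.Printf("line %q covers source bytes [%d, %d)\n", line, seg.Start, seg.Stop)
        r.AdvanceLine()
    }
}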
@@ -0,0 +1,209 @@
package text

import (
    "bytes"
    "github.com/yuin/goldmark/util"
)

var space = []byte(" ")

// A Segment struct holds information about source positions.
type Segment struct {
    // Start is a start position of the segment.
    Start int

    // Stop is a stop position of the segment.
    // This value should be excluded.
    Stop int

    // Padding is a padding length of the segment.
    Padding int
}

// NewSegment return a new Segment.
func NewSegment(start, stop int) Segment {
    return Segment{
        Start:   start,
        Stop:    stop,
        Padding: 0,
    }
}

// NewSegmentPadding returns a new Segment with the given padding.
func NewSegmentPadding(start, stop, n int) Segment {
    return Segment{
        Start:   start,
        Stop:    stop,
        Padding: n,
    }
}

// Value returns a value of the segment.
func (t *Segment) Value(buffer []byte) []byte {
    if t.Padding == 0 {
        return buffer[t.Start:t.Stop]
    }
    result := make([]byte, 0, t.Padding+t.Stop-t.Start+1)
    result = append(result, bytes.Repeat(space, t.Padding)...)
    return append(result, buffer[t.Start:t.Stop]...)
}

// Len returns a length of the segment.
func (t *Segment) Len() int {
    return t.Stop - t.Start + t.Padding
}

// Between returns a segment between this segment and the given segment.
func (t *Segment) Between(other Segment) Segment {
    if t.Stop != other.Stop {
        panic("invalid state")
    }
    return NewSegmentPadding(
        t.Start,
        other.Start,
        t.Padding-other.Padding,
    )
}

// IsEmpty returns true if this segment is empty, otherwise false.
func (t *Segment) IsEmpty() bool {
    return t.Start >= t.Stop && t.Padding == 0
}

// TrimRightSpace returns a new segment by slicing off all trailing
// space characters.
func (t *Segment) TrimRightSpace(buffer []byte) Segment {
    v := buffer[t.Start:t.Stop]
    l := util.TrimRightSpaceLength(v)
    if l == len(v) {
        return NewSegment(t.Start, t.Start)
    }
    return NewSegmentPadding(t.Start, t.Stop-l, t.Padding)
}

// TrimLeftSpace returns a new segment by slicing off all leading
// space characters including padding.
func (t *Segment) TrimLeftSpace(buffer []byte) Segment {
    v := buffer[t.Start:t.Stop]
    l := util.TrimLeftSpaceLength(v)
    return NewSegment(t.Start+l, t.Stop)
}

// TrimLeftSpaceWidth returns a new segment by slicing off leading space
// characters until the given width.
func (t *Segment) TrimLeftSpaceWidth(width int, buffer []byte) Segment {
    padding := t.Padding
    for ; width > 0; width-- {
        if padding == 0 {
            break
        }
        padding--
    }
    if width == 0 {
        return NewSegmentPadding(t.Start, t.Stop, padding)
    }
    text := buffer[t.Start:t.Stop]
    start := t.Start
    for _, c := range text {
        if start >= t.Stop-1 || width <= 0 {
            break
        }
        if c == ' ' {
            width--
        } else if c == '\t' {
            width -= 4
        } else {
            break
        }
        start++
    }
    if width < 0 {
        padding = width * -1
    }
    return NewSegmentPadding(start, t.Stop, padding)
}

// WithStart returns a new Segment with same value except Start.
func (t *Segment) WithStart(v int) Segment {
    return NewSegmentPadding(v, t.Stop, t.Padding)
}

// WithStop returns a new Segment with same value except Stop.
func (t *Segment) WithStop(v int) Segment {
    return NewSegmentPadding(t.Start, v, t.Padding)
}

// ConcatPadding concats the padding to the given slice.
func (t *Segment) ConcatPadding(v []byte) []byte {
    if t.Padding > 0 {
        return append(v, bytes.Repeat(space, t.Padding)...)
    }
    return v
}

// Segments is a collection of the Segment.
type Segments struct {
    values []Segment
}

// NewSegments return a new Segments.
func NewSegments() *Segments {
    return &Segments{
        values: nil,
    }
}

// Append appends the given segment after the tail of the collection.
func (s *Segments) Append(t Segment) {
    if s.values == nil {
        s.values = make([]Segment, 0, 20)
    }
    s.values = append(s.values, t)
}

// AppendAll appends all elements of given segments after the tail of the collection.
func (s *Segments) AppendAll(t []Segment) {
    if s.values == nil {
        s.values = make([]Segment, 0, 20)
    }
    s.values = append(s.values, t...)
}

// Len returns the length of the collection.
func (s *Segments) Len() int {
    if s.values == nil {
        return 0
    }
    return len(s.values)
}

// At returns a segment at the given index.
func (s *Segments) At(i int) Segment {
    return s.values[i]
}

// Set sets the given Segment.
func (s *Segments) Set(i int, v Segment) {
    s.values[i] = v
}

// SetSliced replace the collection with a subsliced value.
func (s *Segments) SetSliced(lo, hi int) {
    s.values = s.values[lo:hi]
}

// Sliced returns a subslice of the collection.
func (s *Segments) Sliced(lo, hi int) []Segment {
    return s.values[lo:hi]
}

// Clear delete all element of the collection.
func (s *Segments) Clear() {
    s.values = nil
}

// Unshift insert the given Segment to head of the collection.
func (s *Segments) Unshift(v Segment) {
    s.values = append(s.values[0:1], s.values[0:]...)
    s.values[0] = v
}
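Illustrative aside (not part of the diff): a Segment is a half-open [Start, Stop) byte range over the original source, and Segments collects such ranges without copying the text. A minimal sketch using only the API shown above:

package main

import (
    "fmt"

    "github.com/yuin/goldmark/text"
)

func main() {
    source := []byte("hello, goldmark")

    seg := text.NewSegment(0, 5)
    fmt.Printf("%s\n", seg.Value(source)) // "hello" (Stop is exclusive)

    segs := text.NewSegments()
    segs.Append(seg)
    segs.Append(text.NewSegment(7, 15))
    fmt.Println(segs.Len()) // 2

    second := segs.At(1)
    fmt.Printf("%s\n", second.Value(source)) // "goldmark"
}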
(Two file diffs were suppressed because they are too large, and some files were not shown because too many files changed in this commit.)