Mirror of https://github.com/v2fly/v2ray-core.git (synced 2024-12-21 01:27:03 -05:00)
v5: New multi-json loader (rebased from ff59bd37ce)
This commit is contained in:
parent d1eafe2b4c
commit 8c78712841
@ -13,5 +13,10 @@ func init() {
|
||||
cmdTLS,
|
||||
cmdUUID,
|
||||
cmdVerify,
|
||||
cmdMerge,
|
||||
|
||||
// documents
|
||||
docFormat,
|
||||
docMerge,
|
||||
)
|
||||
}
|
||||
|
@ -2,32 +2,33 @@ package all
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/v2fly/v2ray-core/v4/commands/base"
|
||||
"github.com/v2fly/v2ray-core/v4/common"
|
||||
"github.com/v2fly/v2ray-core/v4/common/buf"
|
||||
"github.com/v2fly/v2ray-core/v4/infra/conf"
|
||||
"github.com/v2fly/v2ray-core/v4/infra/conf/merge"
|
||||
"github.com/v2fly/v2ray-core/v4/infra/conf/serial"
|
||||
"google.golang.org/protobuf/proto"
|
||||
)
|
||||
|
||||
var cmdConvert = &base.Command{
|
||||
UsageLine: "{{.Exec}} convert [json file] [json file] ...",
|
||||
UsageLine: "{{.Exec}} convert [-r] [c1.json] [<url>.json] [dir1] ...",
|
||||
Short: "Convert multiple json config to protobuf",
|
||||
Long: `
|
||||
Convert multiple json config to protobuf.
|
||||
Convert JSON config to protobuf.
|
||||
|
||||
If multiple JSON files or folders are specified, they are merged first, then converted.
|
||||
|
||||
Arguments:
|
||||
|
||||
-r
|
||||
Load confdir recursively.
|
||||
|
||||
Examples:
|
||||
|
||||
{{.Exec}} {{.LongName}} config.json c1.json c2.json <url>.json
|
||||
{{.Exec}} {{.LongName}} config.json
|
||||
{{.Exec}} {{.LongName}} c1.json c2.json
|
||||
{{.Exec}} {{.LongName}} c1.json https://url.to/c2.json
|
||||
{{.Exec}} {{.LongName}} "path/to/json_dir"
|
||||
`,
|
||||
}
|
||||
|
||||
@ -35,25 +36,26 @@ func init() {
|
||||
cmdConvert.Run = executeConvert // break init loop
|
||||
}
|
||||
|
||||
var convertReadDirRecursively = cmdConvert.Flag.Bool("r", false, "")
|
||||
|
||||
func executeConvert(cmd *base.Command, args []string) {
|
||||
unnamedArgs := cmdConvert.Flag.Args()
|
||||
if len(unnamedArgs) < 1 {
|
||||
unnamed := cmd.Flag.Args()
|
||||
files := resolveFolderToFiles(unnamed, *convertReadDirRecursively)
|
||||
if len(files) == 0 {
|
||||
base.Fatalf("empty config list")
|
||||
}
|
||||
|
||||
conf := &conf.Config{}
|
||||
for _, arg := range unnamedArgs {
|
||||
fmt.Fprintf(os.Stderr, "Read config: %s", arg)
|
||||
r, err := loadArg(arg)
|
||||
common.Must(err)
|
||||
c, err := serial.DecodeJSONConfig(r)
|
||||
if err != nil {
|
||||
base.Fatalf(err.Error())
|
||||
}
|
||||
conf.Override(c, arg)
|
||||
data, err := merge.FilesToJSON(files)
|
||||
if err != nil {
|
||||
base.Fatalf("failed to load json: %s", err)
|
||||
}
|
||||
r := bytes.NewReader(data)
|
||||
cf, err := serial.DecodeJSONConfig(r)
|
||||
if err != nil {
|
||||
base.Fatalf("failed to decode json: %s", err)
|
||||
}
|
||||
|
||||
pbConfig, err := conf.Build()
|
||||
pbConfig, err := cf.Build()
|
||||
if err != nil {
|
||||
base.Fatalf(err.Error())
|
||||
}
|
||||
@ -67,60 +69,3 @@ func executeConvert(cmd *base.Command, args []string) {
|
||||
base.Fatalf("failed to write proto config: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
// loadArg loads one arg, maybe an remote url, or local file path
|
||||
func loadArg(arg string) (out io.Reader, err error) {
|
||||
var data []byte
|
||||
switch {
|
||||
case strings.HasPrefix(arg, "http://"), strings.HasPrefix(arg, "https://"):
|
||||
data, err = FetchHTTPContent(arg)
|
||||
|
||||
case arg == "stdin:":
|
||||
data, err = ioutil.ReadAll(os.Stdin)
|
||||
|
||||
default:
|
||||
data, err = ioutil.ReadFile(arg)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
out = bytes.NewBuffer(data)
|
||||
return
|
||||
}
|
||||
|
||||
// FetchHTTPContent dials https for remote content
|
||||
func FetchHTTPContent(target string) ([]byte, error) {
|
||||
parsedTarget, err := url.Parse(target)
|
||||
if err != nil {
|
||||
return nil, newError("invalid URL: ", target).Base(err)
|
||||
}
|
||||
|
||||
if s := strings.ToLower(parsedTarget.Scheme); s != "http" && s != "https" {
|
||||
return nil, newError("invalid scheme: ", parsedTarget.Scheme)
|
||||
}
|
||||
|
||||
client := &http.Client{
|
||||
Timeout: 30 * time.Second,
|
||||
}
|
||||
resp, err := client.Do(&http.Request{
|
||||
Method: "GET",
|
||||
URL: parsedTarget,
|
||||
Close: true,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, newError("failed to dial to ", target).Base(err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != 200 {
|
||||
return nil, newError("unexpected HTTP status code: ", resp.StatusCode)
|
||||
}
|
||||
|
||||
content, err := buf.ReadAllToBytes(resp.Body)
|
||||
if err != nil {
|
||||
return nil, newError("failed to read HTTP response").Base(err)
|
||||
}
|
||||
|
||||
return content, nil
|
||||
}
|
||||
|
50
commands/all/format_doc.go
Normal file
@ -0,0 +1,50 @@
|
||||
package all
|
||||
|
||||
import (
|
||||
"github.com/v2fly/v2ray-core/v4/commands/base"
|
||||
)
|
||||
|
||||
var docFormat = &base.Command{
|
||||
UsageLine: "{{.Exec}} format-loader",
|
||||
Short: "config formats and loading",
|
||||
Long: `
|
||||
{{.Exec}} supports different config formats:
|
||||
|
||||
* json (.json, .jsonc)
|
||||
The default loader, with support for multiple config files.
|
||||
|
||||
* yaml (.yml)
|
||||
The yaml loader (coming soon?), with support for multiple config files.
|
||||
|
||||
* protobuf / pb (.pb)
|
||||
Single config file support. If multiple files are assigned,
|
||||
only the first one is loaded.
|
||||
|
||||
If "-format" is not explicitly specified, {{.Exec}} will choose
|
||||
a loader by detecting the extension of the first config file, or
|
||||
use the default loader.
|
||||
|
||||
The following explains how format loaders behave with examples.
|
||||
|
||||
Examples:
|
||||
|
||||
{{.Exec}} run -d dir (1)
|
||||
{{.Exec}} run -format=protobuf -d dir (2)
|
||||
{{.Exec}} test -c c1.yml -d dir (3)
|
||||
{{.Exec}} test -format=pb -c c1.json (4)
|
||||
|
||||
(1) The default json loader is used, {{.Exec}} will try to load all
|
||||
json files in the "dir".
|
||||
|
||||
(2) The protobuf loader is specified, {{.Exec}} will try to find
|
||||
all protobuf files in the "dir", but only the first
|
||||
.pb file is loaded.
|
||||
|
||||
(3) The yaml loader is selected because of the "c1.yml" file,
|
||||
{{.Exec}} will try to load "c1.yml" and all yaml files in
|
||||
the "dir".
|
||||
|
||||
(4) The protobuf loader is specified, {{.Exec}} will load
|
||||
"c1.json" as protobuf, no matter its extension.
|
||||
`,
|
||||
}
|
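The extension-based selection described above is backed by the new core.GetConfigLoader helper added in this commit. The following is only a minimal sketch of calling it directly; the file name is illustrative, and importing the core package alone registers just the Protobuf loader:

    package main

    import (
        "fmt"

        core "github.com/v2fly/v2ray-core/v4"
    )

    func main() {
        // Empty format name: the loader is picked by the file extension.
        // ".pb" maps to the Protobuf loader registered in core's init();
        // unknown extensions fall back to the default "json" loader,
        // when a JSON loader has been registered.
        f, err := core.GetConfigLoader("", "config.pb")
        if err != nil {
            fmt.Println("no loader:", err)
            return
        }
        fmt.Println(f.Name) // [Protobuf pb]
    }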
101
commands/all/merge.go
Normal file
@ -0,0 +1,101 @@
|
||||
package all
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/v2fly/v2ray-core/v4/commands/base"
|
||||
"github.com/v2fly/v2ray-core/v4/infra/conf/merge"
|
||||
)
|
||||
|
||||
var cmdMerge = &base.Command{
|
||||
UsageLine: "{{.Exec}} merge [-r] [c1.json] [url] [dir1] ...",
|
||||
Short: "Merge json files into one",
|
||||
Long: `
|
||||
Merge JSON files into one.
|
||||
|
||||
Arguments:
|
||||
|
||||
-r
|
||||
Load confdir recursively.
|
||||
|
||||
Examples:
|
||||
|
||||
{{.Exec}} {{.LongName}} c1.json c2.json
|
||||
{{.Exec}} {{.LongName}} c1.json https://url.to/c2.json
|
||||
{{.Exec}} {{.LongName}} "path/to/json_dir"
|
||||
`,
|
||||
}
|
||||
|
||||
func init() {
|
||||
cmdMerge.Run = executeMerge
|
||||
}
|
||||
|
||||
var mergeReadDirRecursively = cmdMerge.Flag.Bool("r", false, "")
|
||||
|
||||
func executeMerge(cmd *base.Command, args []string) {
|
||||
unnamed := cmd.Flag.Args()
|
||||
files := resolveFolderToFiles(unnamed, *mergeReadDirRecursively)
|
||||
if len(files) == 0 {
|
||||
base.Fatalf("empty config list")
|
||||
}
|
||||
|
||||
data, err := merge.FilesToJSON(files)
|
||||
if err != nil {
|
||||
base.Fatalf(err.Error())
|
||||
}
|
||||
if _, err := os.Stdout.Write(data); err != nil {
|
||||
base.Fatalf(err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
// resolveFolderToFiles expands folder paths (if they exist) to file paths.
|
||||
// Any other paths, such as files or URLs, are returned as is.
|
||||
func resolveFolderToFiles(paths []string, recursively bool) []string {
|
||||
dirReader := readConfDir
|
||||
if recursively {
|
||||
dirReader = readConfDirRecursively
|
||||
}
|
||||
files := make([]string, 0)
|
||||
for _, p := range paths {
|
||||
i, err := os.Stat(p)
|
||||
if err == nil && i.IsDir() {
|
||||
files = append(files, dirReader(p)...)
|
||||
continue
|
||||
}
|
||||
files = append(files, p)
|
||||
}
|
||||
return files
|
||||
}
|
||||
|
||||
func readConfDir(dirPath string) []string {
|
||||
confs, err := ioutil.ReadDir(dirPath)
|
||||
if err != nil {
|
||||
base.Fatalf("failed to read dir %s: %s", dirPath, err)
|
||||
}
|
||||
files := make([]string, 0)
|
||||
for _, f := range confs {
|
||||
ext := filepath.Ext(f.Name())
|
||||
if ext == ".json" || ext == ".jsonc" {
|
||||
files = append(files, filepath.Join(dirPath, f.Name()))
|
||||
}
|
||||
}
|
||||
return files
|
||||
}
|
||||
|
||||
// readConfDirRecursively gets files in the folder and its children
|
||||
func readConfDirRecursively(dirPath string) []string {
|
||||
files := make([]string, 0)
|
||||
err := filepath.Walk(dirPath, func(path string, info os.FileInfo, err error) error {
|
||||
ext := filepath.Ext(path)
|
||||
if ext == ".json" || ext == ".jsonc" {
|
||||
files = append(files, path)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
base.Fatalf("failed to read dir %s: %s", dirPath, err)
|
||||
}
|
||||
return files
|
||||
}
|
66
commands/all/merge_doc.go
Normal file
@ -0,0 +1,66 @@
|
||||
package all
|
||||
|
||||
import (
|
||||
"github.com/v2fly/v2ray-core/v4/commands/base"
|
||||
)
|
||||
|
||||
var docMerge = &base.Command{
|
||||
UsageLine: "{{.Exec}} json-merge",
|
||||
Short: "json merge logic",
|
||||
Long: `
|
||||
Merging of JSON configs is applied in the following commands:
|
||||
|
||||
{{.Exec}} run -c c1.json -c c2.json ...
|
||||
{{.Exec}} merge c1.json https://url.to/c2.json ...
|
||||
{{.Exec}} convert c1.json dir1 ...
|
||||
|
||||
Suppose we have 2 JSON files,
|
||||
|
||||
The 1st one:
|
||||
|
||||
{
|
||||
"log": {"access": "some_value", "loglevel": "debug"},
|
||||
"inbounds": [{"tag": "in-1"}],
|
||||
"outbounds": [{"_priority": 100, "tag": "out-1"}],
|
||||
"routing": {"rules": [
|
||||
{"_tag":"default_route","inboundTag":["in-1"],"outboundTag":"out-1"}
|
||||
]}
|
||||
}
|
||||
|
||||
The 2nd one:
|
||||
|
||||
{
|
||||
"log": {"loglevel": "error"},
|
||||
"inbounds": [{"tag": "in-2"}],
|
||||
"outbounds": [{"_priority": -100, "tag": "out-2"}],
|
||||
"routing": {"rules": [
|
||||
{"inboundTag":["in-2"],"outboundTag":"out-2"},
|
||||
{"_tag":"default_route","inboundTag":["in-1.1"],"outboundTag":"out-1.1"}
|
||||
]}
|
||||
}
|
||||
|
||||
Output:
|
||||
|
||||
{
|
||||
// loglevel is overwritten
|
||||
"log": {"access": "some_value", "loglevel": "error"},
|
||||
"inbounds": [{"tag": "in-1"}, {"tag": "in-2"}],
|
||||
"outbounds": [
|
||||
{"tag": "out-2"}, // note the order is affected by priority
|
||||
{"tag": "out-1"}
|
||||
],
|
||||
"routing": {"rules": [
|
||||
// note 3 rules are merged into 2, and outboundTag is overwritten,
|
||||
// because 2 of them have the same tag
|
||||
{"inboundTag":["in-1","in-1.1"],"outboundTag":"out-1.1"}
|
||||
{"inboundTag":["in-2"],"outboundTag":"out-2"}
|
||||
]}
|
||||
}
|
||||
|
||||
Explained:
|
||||
|
||||
- Simple values (string, number, boolean) are overwritten, others are merged
|
||||
- Elements with the same "tag" (or "_tag") in an array will be merged
|
||||
- Add "_priority" property to array elements will help sort the array
|
||||
`,
|
||||
}
|
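The same merge rules are exposed programmatically by the new infra/conf/merge package added in this commit; below is a minimal sketch of calling it directly (the inline configs are illustrative):

    package main

    import (
        "fmt"

        "github.com/v2fly/v2ray-core/v4/infra/conf/merge"
    )

    func main() {
        c1 := []byte(`{"log": {"loglevel": "debug"}, "inbounds": [{"tag": "in-1"}]}`)
        c2 := []byte(`{"log": {"loglevel": "error"}, "inbounds": [{"tag": "in-2"}]}`)

        // BytesToJSON applies the rules documented above (overwrite simple
        // values, concatenate arrays, merge by tag) and returns one JSON blob.
        out, err := merge.BytesToJSON([][]byte{c1, c2})
        if err != nil {
            panic(err)
        }
        fmt.Println(string(out))
        // {"inbounds":[{"tag":"in-1"},{"tag":"in-2"}],"log":{"loglevel":"error"}}
    }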
@ -7,7 +7,10 @@ import (
|
||||
|
||||
// CommandEnvHolder is a struct that holds the environment info of commands
|
||||
type CommandEnvHolder struct {
|
||||
// Executable name of the current binary
|
||||
Exec string
|
||||
// commands column width of current command
|
||||
CommandsWidth int
|
||||
}
|
||||
|
||||
// CommandEnv holds the environment info of commands
|
||||
|
@ -53,21 +53,17 @@ Usage:
|
||||
|
||||
The commands are:
|
||||
{{range .Commands}}{{if and (ne .Short "") (or (.Runnable) .Commands)}}
|
||||
{{.Name | printf "%-12s"}} {{.Short}}{{end}}{{end}}
|
||||
{{.Name | width $.CommandsWidth}} {{.Short}}{{end}}{{end}}
|
||||
|
||||
Use "{{.Exec}} help{{with .LongName}} {{.}}{{end}} <command>" for more information about a command.
|
||||
`
|
||||
{{if eq (.UsageLine) (.Exec)}}
|
||||
Additional help topics:
|
||||
{{range .Commands}}{{if and (not .Runnable) (not .Commands)}}
|
||||
{{.Name | width $.CommandsWidth}} {{.Short}}{{end}}{{end}}
|
||||
|
||||
// APPEND FOLLOWING TO 'usageTemplate' IF YOU WANT DOC,
|
||||
// A DOC TOPIC IS JUST A COMMAND NOT RUNNABLE:
|
||||
//
|
||||
// {{if eq (.UsageLine) (.Exec)}}
|
||||
// Additional help topics:
|
||||
// {{range .Commands}}{{if and (not .Runnable) (not .Commands)}}
|
||||
// {{.Name | printf "%-15s"}} {{.Short}}{{end}}{{end}}
|
||||
//
|
||||
// Use "{{.Exec}} help{{with .LongName}} {{.}}{{end}} <topic>" for more information about that topic.
|
||||
// {{end}}
|
||||
Use "{{.Exec}} help{{with .LongName}} {{.}}{{end}} <topic>" for more information about that topic.
|
||||
{{end}}
|
||||
`
|
||||
|
||||
var helpTemplate = `{{if .Runnable}}usage: {{.UsageLine}}
|
||||
|
||||
@ -91,7 +87,7 @@ func (w *errWriter) Write(b []byte) (int, error) {
|
||||
// tmpl executes the given template text on data, writing the result to w.
|
||||
func tmpl(w io.Writer, text string, data interface{}) {
|
||||
t := template.New("top")
|
||||
t.Funcs(template.FuncMap{"trim": strings.TrimSpace, "capitalize": capitalize})
|
||||
t.Funcs(template.FuncMap{"trim": strings.TrimSpace, "capitalize": capitalize, "width": width})
|
||||
template.Must(t.Parse(text))
|
||||
ew := &errWriter{w: w}
|
||||
err := t.Execute(ew, data)
|
||||
@ -116,6 +112,11 @@ func capitalize(s string) string {
|
||||
return string(unicode.ToTitle(r)) + s[n:]
|
||||
}
|
||||
|
||||
func width(width int, value string) string {
|
||||
format := fmt.Sprintf("%%-%ds", width)
|
||||
return fmt.Sprintf(format, value)
|
||||
}
|
||||
|
||||
// PrintUsage prints usage of cmd to w
|
||||
func PrintUsage(w io.Writer, cmd *Command) {
|
||||
bw := bufio.NewWriter(w)
|
||||
@ -151,6 +152,15 @@ type tmplData struct {
|
||||
}
|
||||
|
||||
func makeTmplData(cmd *Command) tmplData {
|
||||
// Minimum width of the command column
|
||||
width := 12
|
||||
for _, c := range cmd.Commands {
|
||||
l := len(c.Name())
|
||||
if width < l {
|
||||
width = l
|
||||
}
|
||||
}
|
||||
CommandEnv.CommandsWidth = width
|
||||
return tmplData{
|
||||
Command: cmd,
|
||||
CommandEnvHolder: &CommandEnv,
|
||||
|
83
config.go
@ -5,6 +5,7 @@ package core
|
||||
|
||||
import (
|
||||
"io"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"google.golang.org/protobuf/proto"
|
||||
@ -17,7 +18,7 @@ import (
|
||||
|
||||
// ConfigFormat is a configurable format of V2Ray config file.
|
||||
type ConfigFormat struct {
|
||||
Name string
|
||||
Name []string
|
||||
Extension []string
|
||||
Loader ConfigLoader
|
||||
}
|
||||
@ -32,11 +33,13 @@ var (
|
||||
|
||||
// RegisterConfigLoader add a new ConfigLoader.
|
||||
func RegisterConfigLoader(format *ConfigFormat) error {
|
||||
name := strings.ToLower(format.Name)
|
||||
if _, found := configLoaderByName[name]; found {
|
||||
return newError(format.Name, " already registered.")
|
||||
for _, name := range format.Name {
|
||||
lname := strings.ToLower(name)
|
||||
if _, found := configLoaderByName[lname]; found {
|
||||
return newError(name, " already registered.")
|
||||
}
|
||||
configLoaderByName[lname] = format
|
||||
}
|
||||
configLoaderByName[name] = format
|
||||
|
||||
for _, ext := range format.Extension {
|
||||
lext := strings.ToLower(ext)
|
||||
@ -50,11 +53,33 @@ func RegisterConfigLoader(format *ConfigFormat) error {
|
||||
}
|
||||
|
||||
func getExtension(filename string) string {
|
||||
idx := strings.LastIndexByte(filename, '.')
|
||||
if idx == -1 {
|
||||
return ""
|
||||
ext := filepath.Ext(filename)
|
||||
return strings.ToLower(ext)
|
||||
}
|
||||
|
||||
// GetConfigLoader gets a config loader by name and filename.
|
||||
// Specify formatName to explicitly select a loader.
|
||||
// Specify filename to choose a loader by detecting its extension.
|
||||
// Leave formatName and filename blank for the default loader.
|
||||
func GetConfigLoader(formatName string, filename string) (*ConfigFormat, error) {
|
||||
if formatName != "" {
|
||||
// if explicitly specified, we can safely assume that the user knows what they are doing
|
||||
if f, found := configLoaderByName[formatName]; found {
|
||||
return f, nil
|
||||
}
|
||||
return nil, newError("Unable to load config in ", formatName).AtWarning()
|
||||
}
|
||||
return filename[idx+1:]
|
||||
// no loader explicitly specified, detect by extension first
|
||||
if ext := getExtension(filename); len(ext) > 0 {
|
||||
if f, found := configLoaderByExt[ext]; found {
|
||||
return f, nil
|
||||
}
|
||||
}
|
||||
// default loader
|
||||
if f, found := configLoaderByName["json"]; found {
|
||||
return f, nil
|
||||
}
|
||||
panic("default loader not found")
|
||||
}
|
||||
|
||||
// LoadConfig loads config with given format from given source.
|
||||
@ -62,27 +87,11 @@ func getExtension(filename string) string {
|
||||
// * []string slice of multiple filename/url(s) to open to read
|
||||
// * io.Reader that reads a config content (the original way)
|
||||
func LoadConfig(formatName string, filename string, input interface{}) (*Config, error) {
|
||||
if formatName != "" {
|
||||
// if clearly specified, we can safely assume that user knows what they are
|
||||
if f, found := configLoaderByName[formatName]; found {
|
||||
return f.Loader(input)
|
||||
}
|
||||
} else {
|
||||
// no explicitly specified loader, extenstion detect first
|
||||
ext := getExtension(filename)
|
||||
if len(ext) > 0 {
|
||||
if f, found := configLoaderByExt[ext]; found {
|
||||
return f.Loader(input)
|
||||
}
|
||||
}
|
||||
// try default loader
|
||||
formatName = "json"
|
||||
if f, found := configLoaderByName[formatName]; found {
|
||||
return f.Loader(input)
|
||||
}
|
||||
f, err := GetConfigLoader(formatName, filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return nil, newError("Unable to load config in ", formatName).AtWarning()
|
||||
return f.Loader(input)
|
||||
}
|
||||
|
||||
func loadProtobufConfig(data []byte) (*Config, error) {
|
||||
@ -95,19 +104,25 @@ func loadProtobufConfig(data []byte) (*Config, error) {
|
||||
|
||||
func init() {
|
||||
common.Must(RegisterConfigLoader(&ConfigFormat{
|
||||
Name: "Protobuf",
|
||||
Extension: []string{"pb"},
|
||||
Name: []string{"Protobuf", "pb"},
|
||||
Extension: []string{".pb"},
|
||||
Loader: func(input interface{}) (*Config, error) {
|
||||
switch v := input.(type) {
|
||||
case cmdarg.Arg:
|
||||
r, err := confloader.LoadConfig(v[0])
|
||||
common.Must(err)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
data, err := buf.ReadAllToBytes(r)
|
||||
common.Must(err)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return loadProtobufConfig(data)
|
||||
case io.Reader:
|
||||
data, err := buf.ReadAllToBytes(v)
|
||||
common.Must(err)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return loadProtobufConfig(data)
|
||||
default:
|
||||
return nil, newError("unknow type")
|
||||
|
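With ConfigFormat.Name now a list of aliases and Extension entries carrying the leading dot, registering a loader looks roughly like the sketch below. This is a hedged illustration only: the "TOML" format and its package are hypothetical, not part of this commit.

    package tomlconf

    import (
        "errors"

        core "github.com/v2fly/v2ray-core/v4"
        "github.com/v2fly/v2ray-core/v4/common"
    )

    func init() {
        common.Must(core.RegisterConfigLoader(&core.ConfigFormat{
            Name:      []string{"TOML"},  // any alias here is accepted by -format
            Extension: []string{".toml"}, // extensions now include the dot
            Loader: func(input interface{}) (*core.Config, error) {
                return nil, errors.New("toml loading is not implemented in this sketch")
            },
        }))
    }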
9
infra/conf/merge/errors.generated.go
Normal file
@ -0,0 +1,9 @@
|
||||
package merge
|
||||
|
||||
import "github.com/v2fly/v2ray-core/v4/common/errors"
|
||||
|
||||
type errPathObjHolder struct{}
|
||||
|
||||
func newError(values ...interface{}) *errors.Error {
|
||||
return errors.New(values...).WithPathObj(errPathObjHolder{})
|
||||
}
|
78
infra/conf/merge/file.go
Normal file
@ -0,0 +1,78 @@
|
||||
package merge
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/v2fly/v2ray-core/v4/common/buf"
|
||||
"github.com/v2fly/v2ray-core/v4/infra/conf/serial"
|
||||
)
|
||||
|
||||
// loadArg loads one arg, which may be a remote URL or a local file path
|
||||
func loadArg(arg string) (out io.Reader, err error) {
|
||||
var data []byte
|
||||
switch {
|
||||
case strings.HasPrefix(arg, "http://"), strings.HasPrefix(arg, "https://"):
|
||||
data, err = fetchHTTPContent(arg)
|
||||
case (arg == "stdin:"):
|
||||
data, err = ioutil.ReadAll(os.Stdin)
|
||||
default:
|
||||
data, err = ioutil.ReadFile(arg)
|
||||
}
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
out = bytes.NewBuffer(data)
|
||||
return
|
||||
}
|
||||
|
||||
// fetchHTTPContent fetches remote content over HTTP or HTTPS
|
||||
func fetchHTTPContent(target string) ([]byte, error) {
|
||||
parsedTarget, err := url.Parse(target)
|
||||
if err != nil {
|
||||
return nil, newError("invalid URL: ", target).Base(err)
|
||||
}
|
||||
|
||||
if s := strings.ToLower(parsedTarget.Scheme); s != "http" && s != "https" {
|
||||
return nil, newError("invalid scheme: ", parsedTarget.Scheme)
|
||||
}
|
||||
|
||||
client := &http.Client{
|
||||
Timeout: 30 * time.Second,
|
||||
}
|
||||
resp, err := client.Do(&http.Request{
|
||||
Method: "GET",
|
||||
URL: parsedTarget,
|
||||
Close: true,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, newError("failed to dial to ", target).Base(err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != 200 {
|
||||
return nil, newError("unexpected HTTP status code: ", resp.StatusCode)
|
||||
}
|
||||
|
||||
content, err := buf.ReadAllToBytes(resp.Body)
|
||||
if err != nil {
|
||||
return nil, newError("failed to read HTTP response").Base(err)
|
||||
}
|
||||
|
||||
return content, nil
|
||||
}
|
||||
|
||||
func decode(r io.Reader) (map[string]interface{}, error) {
|
||||
c := make(map[string]interface{})
|
||||
err := serial.DecodeJSON(r, &c)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return c, nil
|
||||
}
|
43
infra/conf/merge/map.go
Normal file
@ -0,0 +1,43 @@
|
||||
// Copyright 2020 Jebbs. All rights reserved.
|
||||
// Use of this source code is governed by MIT
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package merge
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// mergeMaps merges source map into target
|
||||
func mergeMaps(target map[string]interface{}, source map[string]interface{}) (err error) {
|
||||
for key, value := range source {
|
||||
target[key], err = mergeField(target[key], value)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func mergeField(target interface{}, source interface{}) (interface{}, error) {
|
||||
if source == nil {
|
||||
return target, nil
|
||||
}
|
||||
if target == nil {
|
||||
return source, nil
|
||||
}
|
||||
if slice, ok := source.([]interface{}); ok {
|
||||
if tslice, ok := target.([]interface{}); ok {
|
||||
tslice = append(tslice, slice...)
|
||||
return tslice, nil
|
||||
}
|
||||
return nil, fmt.Errorf("value type mismatch, source is 'slice' but target not: %s", source)
|
||||
} else if smap, ok := source.(map[string]interface{}); ok {
|
||||
if tmap, ok := target.(map[string]interface{}); ok {
|
||||
err := mergeMaps(tmap, smap)
|
||||
return tmap, err
|
||||
}
|
||||
return nil, fmt.Errorf("value type mismatch, source is 'map[string]interface{}' but target not: %s", source)
|
||||
}
|
||||
return source, nil
|
||||
}
|
98
infra/conf/merge/merge.go
Normal file
@ -0,0 +1,98 @@
|
||||
// Copyright 2020 Jebbs. All rights reserved.
|
||||
// Use of this source code is governed by MIT
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
/*
|
||||
Package merge provides the capability to merge multiple
|
||||
JSON files or contents into one output.
|
||||
|
||||
Merge Rules:
|
||||
|
||||
- Simple values (string, number, boolean) are overwritten, others are merged
|
||||
- Elements with the same "tag" (or "_tag") in an array will be merged
|
||||
- Add "_priority" property to array elements will help sort the
|
||||
|
||||
*/
|
||||
package merge
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
)
|
||||
|
||||
// FilesToJSON merges multiple JSON files into one JSON output; it accepts remote URLs or local file paths
|
||||
func FilesToJSON(args []string) ([]byte, error) {
|
||||
m, err := FilesToMap(args)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return json.Marshal(m)
|
||||
}
|
||||
|
||||
// BytesToJSON merges multiple json contents into one json.
|
||||
func BytesToJSON(args [][]byte) ([]byte, error) {
|
||||
m, err := BytesToMap(args)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return json.Marshal(m)
|
||||
}
|
||||
|
||||
// FilesToMap merges multiple JSON files into one map; it accepts remote URLs or local file paths
|
||||
func FilesToMap(args []string) (m map[string]interface{}, err error) {
|
||||
m, err = loadFiles(args)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
err = applyRules(m)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return m, nil
|
||||
}
|
||||
|
||||
// BytesToMap merges multiple json contents into one map.
|
||||
func BytesToMap(args [][]byte) (m map[string]interface{}, err error) {
|
||||
m, err = loadBytes(args)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
err = applyRules(m)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return m, nil
|
||||
}
|
||||
|
||||
func loadFiles(args []string) (map[string]interface{}, error) {
|
||||
conf := make(map[string]interface{})
|
||||
for _, arg := range args {
|
||||
r, err := loadArg(arg)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
m, err := decode(r)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err = mergeMaps(conf, m); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return conf, nil
|
||||
}
|
||||
|
||||
func loadBytes(args [][]byte) (map[string]interface{}, error) {
|
||||
conf := make(map[string]interface{})
|
||||
for _, arg := range args {
|
||||
r := bytes.NewReader(arg)
|
||||
m, err := decode(r)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err = mergeMaps(conf, m); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return conf, nil
|
||||
}
|
206
infra/conf/merge/merge_test.go
Normal file
@ -0,0 +1,206 @@
|
||||
// Copyright 2020 Jebbs. All rights reserved.
|
||||
// Use of this source code is governed by MIT
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package merge_test
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"reflect"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/v2fly/v2ray-core/v4/infra/conf/merge"
|
||||
"github.com/v2fly/v2ray-core/v4/infra/conf/serial"
|
||||
)
|
||||
|
||||
func TestMergeV2Style(t *testing.T) {
|
||||
json1 := `
|
||||
{
|
||||
"log": {"access": "some_value", "loglevel": "debug"},
|
||||
"inbounds": [{"tag": "in-1"}],
|
||||
"outbounds": [{"_priority": 100, "tag": "out-1"}],
|
||||
"routing": {"rules": [
|
||||
{"_tag":"default_route","inboundTag":["in-1"],"outboundTag":"out-1"}
|
||||
]}
|
||||
}
|
||||
`
|
||||
json2 := `
|
||||
{
|
||||
"log": {"loglevel": "error"},
|
||||
"inbounds": [{"tag": "in-2"}],
|
||||
"outbounds": [{"_priority": -100, "tag": "out-2"}],
|
||||
"routing": {"rules": [
|
||||
{"inboundTag":["in-2"],"outboundTag":"out-2"},
|
||||
{"_tag":"default_route","inboundTag":["in-1.1"],"outboundTag":"out-1.1"}
|
||||
]}
|
||||
}
|
||||
`
|
||||
expected := `
|
||||
{
|
||||
"log": {"access": "some_value", "loglevel": "error"},
|
||||
"inbounds": [{"tag": "in-1"},{"tag": "in-2"}],
|
||||
"outbounds": [
|
||||
{"tag": "out-2"},
|
||||
{"tag": "out-1"}
|
||||
],
|
||||
"routing": {"rules": [
|
||||
{"inboundTag":["in-1","in-1.1"],"outboundTag":"out-1.1"},
|
||||
{"inboundTag":["in-2"],"outboundTag":"out-2"}
|
||||
]}
|
||||
}
|
||||
`
|
||||
m, err := merge.BytesToMap([][]byte{[]byte(json1), []byte(json2)})
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
assertResult(t, m, expected)
|
||||
}
|
||||
|
||||
func TestMergeTag(t *testing.T) {
|
||||
json1 := `
|
||||
{
|
||||
"routing": {
|
||||
"rules": [{
|
||||
"tag":"1",
|
||||
"inboundTag": ["in-1"],
|
||||
"outboundTag": "out-1"
|
||||
}]
|
||||
}
|
||||
}
|
||||
`
|
||||
json2 := `
|
||||
{
|
||||
"routing": {
|
||||
"rules": [{
|
||||
"_tag":"1",
|
||||
"inboundTag": ["in-2"],
|
||||
"outboundTag": "out-2"
|
||||
}]
|
||||
}
|
||||
}
|
||||
`
|
||||
expected := `
|
||||
{
|
||||
"routing": {
|
||||
"rules": [{
|
||||
"tag":"1",
|
||||
"inboundTag": ["in-1", "in-2"],
|
||||
"outboundTag": "out-2"
|
||||
}]
|
||||
}
|
||||
}
|
||||
`
|
||||
m, err := merge.BytesToMap([][]byte{[]byte(json1), []byte(json2)})
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
assertResult(t, m, expected)
|
||||
}
|
||||
|
||||
func TestMergeTagValueTypes(t *testing.T) {
|
||||
json1 := `
|
||||
{
|
||||
"array_1": [{
|
||||
"_tag":"1",
|
||||
"array_2": [{
|
||||
"_tag":"2",
|
||||
"array_3.1": ["string",true,false],
|
||||
"array_3.2": [1,2,3],
|
||||
"number_1": 1,
|
||||
"number_2": 1,
|
||||
"bool_1": true,
|
||||
"bool_2": true
|
||||
}]
|
||||
}]
|
||||
}
|
||||
`
|
||||
json2 := `
|
||||
{
|
||||
"array_1": [{
|
||||
"_tag":"1",
|
||||
"array_2": [{
|
||||
"_tag":"2",
|
||||
"array_3.1": [0,1,null],
|
||||
"array_3.2": null,
|
||||
"number_1": 0,
|
||||
"number_2": 1,
|
||||
"bool_1": true,
|
||||
"bool_2": false,
|
||||
"null_1": null
|
||||
}]
|
||||
}]
|
||||
}
|
||||
`
|
||||
expected := `
|
||||
{
|
||||
"array_1": [{
|
||||
"array_2": [{
|
||||
"array_3.1": ["string",true,false,0,1,null],
|
||||
"array_3.2": [1,2,3],
|
||||
"number_1": 0,
|
||||
"number_2": 1,
|
||||
"bool_1": true,
|
||||
"bool_2": false,
|
||||
"null_1": null
|
||||
}]
|
||||
}]
|
||||
}
|
||||
`
|
||||
m, err := merge.BytesToMap([][]byte{[]byte(json1), []byte(json2)})
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
assertResult(t, m, expected)
|
||||
}
|
||||
|
||||
func TestMergeTagDeep(t *testing.T) {
|
||||
json1 := `
|
||||
{
|
||||
"array_1": [{
|
||||
"_tag":"1",
|
||||
"array_2": [{
|
||||
"_tag":"2",
|
||||
"array_3": [true,false,"string"]
|
||||
}]
|
||||
}]
|
||||
}
|
||||
`
|
||||
json2 := `
|
||||
{
|
||||
"array_1": [{
|
||||
"_tag":"1",
|
||||
"array_2": [{
|
||||
"_tag":"2",
|
||||
"_priority":-100,
|
||||
"array_3": [0,1,null]
|
||||
}]
|
||||
}]
|
||||
}
|
||||
`
|
||||
expected := `
|
||||
{
|
||||
"array_1": [{
|
||||
"array_2": [{
|
||||
"array_3": [0,1,null,true,false,"string"]
|
||||
}]
|
||||
}]
|
||||
}
|
||||
`
|
||||
m, err := merge.BytesToMap([][]byte{[]byte(json1), []byte(json2)})
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
assertResult(t, m, expected)
|
||||
}
|
||||
func assertResult(t *testing.T, value map[string]interface{}, expected string) {
|
||||
e := make(map[string]interface{})
|
||||
err := serial.DecodeJSON(strings.NewReader(expected), &e)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
if !reflect.DeepEqual(value, e) {
|
||||
bs, _ := json.Marshal(value)
|
||||
t.Fatalf("expected:\n%s\n\nactual:\n%s", expected, string(bs))
|
||||
}
|
||||
}
|
31
infra/conf/merge/priority.go
Normal file
@ -0,0 +1,31 @@
|
||||
// Copyright 2020 Jebbs. All rights reserved.
|
||||
// Use of this source code is governed by MIT
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package merge
|
||||
|
||||
import "sort"
|
||||
|
||||
func getPriority(v interface{}) float64 {
|
||||
var m map[string]interface{}
|
||||
var ok bool
|
||||
if m, ok = v.(map[string]interface{}); !ok {
|
||||
return 0
|
||||
}
|
||||
if i, ok := m[priorityKey]; ok {
|
||||
if p, ok := i.(float64); ok {
|
||||
return p
|
||||
}
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
// sortByPriority sorts a slice by the priority fields of its elements
|
||||
func sortByPriority(slice []interface{}) {
|
||||
sort.Slice(
|
||||
slice,
|
||||
func(i, j int) bool {
|
||||
return getPriority(slice[i]) < getPriority(slice[j])
|
||||
},
|
||||
)
|
||||
}
|
55
infra/conf/merge/rules.go
Normal file
@ -0,0 +1,55 @@
|
||||
// Copyright 2020 Jebbs. All rights reserved.
|
||||
// Use of this source code is governed by MIT
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package merge
|
||||
|
||||
const priorityKey string = "_priority"
|
||||
const tagKey string = "_tag"
|
||||
|
||||
func applyRules(m map[string]interface{}) error {
|
||||
err := sortMergeSlices(m)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
removeHelperFields(m)
|
||||
return nil
|
||||
}
|
||||
|
||||
// sortMergeSlices enumerates all slices in a map, sorting them by priority and merging elements by tag
|
||||
func sortMergeSlices(target map[string]interface{}) error {
|
||||
for key, value := range target {
|
||||
if slice, ok := value.([]interface{}); ok {
|
||||
sortByPriority(slice)
|
||||
s, err := mergeSameTag(slice)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
target[key] = s
|
||||
for _, item := range s {
|
||||
if m, ok := item.(map[string]interface{}); ok {
|
||||
sortMergeSlices(m)
|
||||
}
|
||||
}
|
||||
} else if field, ok := value.(map[string]interface{}); ok {
|
||||
sortMergeSlices(field)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func removeHelperFields(target map[string]interface{}) {
|
||||
for key, value := range target {
|
||||
if key == priorityKey || key == tagKey {
|
||||
delete(target, key)
|
||||
} else if slice, ok := value.([]interface{}); ok {
|
||||
for _, e := range slice {
|
||||
if el, ok := e.(map[string]interface{}); ok {
|
||||
removeHelperFields(el)
|
||||
}
|
||||
}
|
||||
} else if field, ok := value.(map[string]interface{}); ok {
|
||||
removeHelperFields(field)
|
||||
}
|
||||
}
|
||||
}
|
58
infra/conf/merge/tag.go
Normal file
@ -0,0 +1,58 @@
|
||||
// Copyright 2020 Jebbs. All rights reserved.
|
||||
// Use of this source code is governed by MIT
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package merge
|
||||
|
||||
func getTag(v map[string]interface{}) string {
|
||||
if field, ok := v["tag"]; ok {
|
||||
if t, ok := field.(string); ok {
|
||||
return t
|
||||
}
|
||||
}
|
||||
if field, ok := v[tagKey]; ok {
|
||||
if t, ok := field.(string); ok {
|
||||
return t
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func mergeSameTag(s []interface{}) ([]interface{}, error) {
|
||||
// from: [a,"",b,"",a,"",b,""]
|
||||
// to: [a,"",b,"",merged,"",merged,""]
|
||||
merged := &struct{}{}
|
||||
for i, item1 := range s {
|
||||
map1, ok := item1.(map[string]interface{})
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
tag1 := getTag(map1)
|
||||
if tag1 == "" {
|
||||
continue
|
||||
}
|
||||
for j := i + 1; j < len(s); j++ {
|
||||
map2, ok := s[j].(map[string]interface{})
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
tag2 := getTag(map2)
|
||||
if tag1 == tag2 {
|
||||
s[j] = merged
|
||||
err := mergeMaps(map1, map2)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// remove merged
|
||||
ns := make([]interface{}, 0)
|
||||
for _, item := range s {
|
||||
if item == merged {
|
||||
continue
|
||||
}
|
||||
ns = append(ns, item)
|
||||
}
|
||||
return ns, nil
|
||||
}
|
@ -42,14 +42,23 @@ func findOffset(b []byte, o int) *offset {
|
||||
// syntax error could be detected.
|
||||
func DecodeJSONConfig(reader io.Reader) (*conf.Config, error) {
|
||||
jsonConfig := &conf.Config{}
|
||||
err := DecodeJSON(reader, jsonConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return jsonConfig, nil
|
||||
}
|
||||
|
||||
// DecodeJSON reads from reader and decode into target
|
||||
// syntax error could be detected.
|
||||
func DecodeJSON(reader io.Reader, target interface{}) error {
|
||||
jsonContent := bytes.NewBuffer(make([]byte, 0, 10240))
|
||||
jsonReader := io.TeeReader(&json_reader.Reader{
|
||||
Reader: reader,
|
||||
}, jsonContent)
|
||||
decoder := json.NewDecoder(jsonReader)
|
||||
|
||||
if err := decoder.Decode(jsonConfig); err != nil {
|
||||
if err := decoder.Decode(target); err != nil {
|
||||
var pos *offset
|
||||
cause := errors.Cause(err)
|
||||
switch tErr := cause.(type) {
|
||||
@ -59,12 +68,12 @@ func DecodeJSONConfig(reader io.Reader) (*conf.Config, error) {
|
||||
pos = findOffset(jsonContent.Bytes(), int(tErr.Offset))
|
||||
}
|
||||
if pos != nil {
|
||||
return nil, newError("failed to read config file at line ", pos.line, " char ", pos.char).Base(err)
|
||||
return newError("failed to read config file at line ", pos.line, " char ", pos.char).Base(err)
|
||||
}
|
||||
return nil, newError("failed to read config file").Base(err)
|
||||
return newError("failed to read config file").Base(err)
|
||||
}
|
||||
|
||||
return jsonConfig, nil
|
||||
return nil
|
||||
}
|
||||
|
||||
func LoadJSONConfig(reader io.Reader) (*core.Config, error) {
|
||||
|
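DecodeJSONConfig is now a thin wrapper over the new DecodeJSON, which decodes into any target while keeping the line/character error reporting; a minimal sketch of using it directly (the inline JSON is illustrative):

    package main

    import (
        "fmt"
        "strings"

        "github.com/v2fly/v2ray-core/v4/infra/conf/serial"
    )

    func main() {
        m := make(map[string]interface{})
        // On a syntax error this reports "failed to read config file at
        // line N char M", just as DecodeJSONConfig did before.
        if err := serial.DecodeJSON(strings.NewReader(`{"log": {"loglevel": "debug"}}`), &m); err != nil {
            panic(err)
        }
        fmt.Println(m["log"])
    }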
@ -5,7 +5,6 @@ import (
|
||||
"log"
|
||||
"os"
|
||||
"os/signal"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
@ -20,49 +19,52 @@ import (
|
||||
// CmdRun runs V2Ray with config
|
||||
var CmdRun = &base.Command{
|
||||
CustomFlags: true,
|
||||
UsageLine: "{{.Exec}} run [-c config.json] [-confdir dir]",
|
||||
UsageLine: "{{.Exec}} run [-c config.json] [-d dir]",
|
||||
Short: "Run V2Ray with config",
|
||||
Long: `
|
||||
Run V2Ray with config.
|
||||
|
||||
Example:
|
||||
|
||||
{{.Exec}} {{.LongName}} -c config.json
|
||||
|
||||
Arguments:
|
||||
|
||||
-c value
|
||||
Short alias of -config
|
||||
-c, -config
|
||||
Config file for V2Ray. Multiple values are accepted.
|
||||
|
||||
-config value
|
||||
Config file for V2Ray. Multiple assign is accepted (only
|
||||
json). Latter ones overrides the former ones.
|
||||
-d, -confdir
|
||||
A dir with config files. Multiple values are accepted.
|
||||
|
||||
-confdir string
|
||||
A dir with multiple json config
|
||||
-r
|
||||
Load confdir recursively.
|
||||
|
||||
-format string
|
||||
-format
|
||||
Format of input files. (default "json")
|
||||
`,
|
||||
}
|
||||
|
||||
func init() {
|
||||
CmdRun.Run = executeRun //break init loop
|
||||
Examples:
|
||||
|
||||
{{.Exec}} {{.LongName}} -c config.json
|
||||
{{.Exec}} {{.LongName}} -d path/to/dir
|
||||
|
||||
Use "{{.Exec}} help format-loader" for more information about format.
|
||||
`,
|
||||
Run: executeRun,
|
||||
}
|
||||
|
||||
var (
|
||||
configFiles cmdarg.Arg // "Config file for V2Ray.", the option is customed type
|
||||
configDir string
|
||||
configFormat *string
|
||||
configFiles cmdarg.Arg
|
||||
configDirs cmdarg.Arg
|
||||
configFormat *string
|
||||
configDirRecursively *bool
|
||||
)
|
||||
|
||||
func setConfigFlags(cmd *base.Command) {
|
||||
configFormat = cmd.Flag.String("format", "", "")
|
||||
configFormat = cmd.Flag.String("format", "json", "")
|
||||
configDirRecursively = cmd.Flag.Bool("r", false, "")
|
||||
|
||||
cmd.Flag.Var(&configFiles, "config", "")
|
||||
cmd.Flag.Var(&configFiles, "c", "")
|
||||
cmd.Flag.StringVar(&configDir, "confdir", "", "")
|
||||
cmd.Flag.Var(&configDirs, "confdir", "")
|
||||
cmd.Flag.Var(&configDirs, "d", "")
|
||||
}
|
||||
|
||||
func executeRun(cmd *base.Command, args []string) {
|
||||
setConfigFlags(cmd)
|
||||
cmd.Flag.Parse(args)
|
||||
@ -100,32 +102,81 @@ func dirExists(file string) bool {
|
||||
return err == nil && info.IsDir()
|
||||
}
|
||||
|
||||
func readConfDir(dirPath string) cmdarg.Arg {
|
||||
func readConfDir(dirPath string, extension []string) cmdarg.Arg {
|
||||
confs, err := ioutil.ReadDir(dirPath)
|
||||
if err != nil {
|
||||
log.Fatalln(err)
|
||||
base.Fatalf("failed to read dir %s: %s", dirPath, err)
|
||||
}
|
||||
files := make(cmdarg.Arg, 0)
|
||||
for _, f := range confs {
|
||||
if strings.HasSuffix(f.Name(), ".json") {
|
||||
files.Set(path.Join(dirPath, f.Name()))
|
||||
ext := filepath.Ext(f.Name())
|
||||
for _, e := range extension {
|
||||
if strings.EqualFold(e, ext) {
|
||||
files.Set(filepath.Join(dirPath, f.Name()))
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return files
|
||||
}
|
||||
|
||||
// readConfDirRecursively gets files in the folder and its children
|
||||
func readConfDirRecursively(dirPath string, extension []string) cmdarg.Arg {
|
||||
files := make(cmdarg.Arg, 0)
|
||||
err := filepath.Walk(dirPath, func(path string, info os.FileInfo, err error) error {
|
||||
ext := filepath.Ext(path)
|
||||
for _, e := range extension {
|
||||
if strings.EqualFold(e, ext) {
|
||||
files.Set(path)
|
||||
break
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
base.Fatalf("failed to read dir %s: %s", dirPath, err)
|
||||
}
|
||||
return files
|
||||
}
|
||||
|
||||
func getLoaderExtension() ([]string, error) {
|
||||
firstFile := ""
|
||||
if len(configFiles) > 0 {
|
||||
firstFile = configFiles[0]
|
||||
}
|
||||
loader, err := core.GetConfigLoader(*configFormat, firstFile)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return loader.Extension, nil
|
||||
}
|
||||
|
||||
func getConfigFilePath() cmdarg.Arg {
|
||||
if dirExists(configDir) {
|
||||
log.Println("Using confdir from arg:", configDir)
|
||||
configFiles = append(configFiles, readConfDir(configDir)...)
|
||||
extension, err := getLoaderExtension()
|
||||
if err != nil {
|
||||
base.Fatalf(err.Error())
|
||||
}
|
||||
dirReader := readConfDir
|
||||
if *configDirRecursively {
|
||||
dirReader = readConfDirRecursively
|
||||
}
|
||||
if len(configDirs) > 0 {
|
||||
for _, d := range configDirs {
|
||||
log.Println("Using confdir from arg:", d)
|
||||
configFiles = append(configFiles, dirReader(d, extension)...)
|
||||
}
|
||||
} else if envConfDir := platform.GetConfDirPath(); dirExists(envConfDir) {
|
||||
log.Println("Using confdir from env:", envConfDir)
|
||||
configFiles = append(configFiles, readConfDir(envConfDir)...)
|
||||
configFiles = append(configFiles, dirReader(envConfDir, extension)...)
|
||||
}
|
||||
if len(configFiles) > 0 {
|
||||
return configFiles
|
||||
}
|
||||
|
||||
if len(configFiles) == 0 && len(configDirs) > 0 {
|
||||
base.Fatalf("no config file found with extension: %s", extension)
|
||||
}
|
||||
|
||||
if workingDir, err := os.Getwd(); err == nil {
|
||||
configFile := filepath.Join(workingDir, "config.json")
|
||||
if fileExists(configFile) {
|
||||
@ -143,19 +194,10 @@ func getConfigFilePath() cmdarg.Arg {
|
||||
return cmdarg.Arg{"stdin:"}
|
||||
}
|
||||
|
||||
func getFormatFromAlias() string {
|
||||
switch strings.ToLower(*configFormat) {
|
||||
case "pb":
|
||||
return "protobuf"
|
||||
default:
|
||||
return *configFormat
|
||||
}
|
||||
}
|
||||
|
||||
func startV2Ray() (core.Server, error) {
|
||||
configFiles := getConfigFilePath()
|
||||
|
||||
config, err := core.LoadConfig(getFormatFromAlias(), configFiles[0], configFiles)
|
||||
config, err := core.LoadConfig(*configFormat, configFiles[0], configFiles)
|
||||
if err != nil {
|
||||
return nil, newError("failed to read config files: [", configFiles.String(), "]").Base(err)
|
||||
}
|
||||
|
@ -11,50 +11,64 @@ import (
|
||||
// CmdTest tests config files
|
||||
var CmdTest = &base.Command{
|
||||
CustomFlags: true,
|
||||
UsageLine: "{{.Exec}} test [-format=json] [-c config.json] [-confdir dir]",
|
||||
UsageLine: "{{.Exec}} test [-format=json] [-c config.json] [-d dir]",
|
||||
Short: "Test config files",
|
||||
Long: `
|
||||
Test config files, without launching V2Ray server.
|
||||
|
||||
Example:
|
||||
|
||||
{{.Exec}} {{.LongName}} -c config.json
|
||||
|
||||
Arguments:
|
||||
|
||||
-c value
|
||||
Short alias of -config
|
||||
-c, -config
|
||||
Config file for V2Ray. Multiple values are accepted.
|
||||
|
||||
-config value
|
||||
Config file for V2Ray. Multiple assign is accepted (only
|
||||
json). Latter ones overrides the former ones.
|
||||
-d, -confdir
|
||||
A dir with config files. Multiple values are accepted.
|
||||
|
||||
-confdir string
|
||||
A dir with multiple json config
|
||||
-r
|
||||
Load confdir recursively.
|
||||
|
||||
-format string
|
||||
-format
|
||||
Format of input files. (default "json")
|
||||
`,
|
||||
}
|
||||
|
||||
func init() {
|
||||
CmdTest.Run = executeTest //break init loop
|
||||
Examples:
|
||||
|
||||
{{.Exec}} {{.LongName}} -c config.json
|
||||
{{.Exec}} {{.LongName}} -d path/to/dir
|
||||
|
||||
Use "{{.Exec}} help format-loader" for more information about format.
|
||||
`,
|
||||
Run: executeTest,
|
||||
}
|
||||
|
||||
func executeTest(cmd *base.Command, args []string) {
|
||||
setConfigFlags(cmd)
|
||||
cmd.Flag.Parse(args)
|
||||
if dirExists(configDir) {
|
||||
log.Println("Using confdir from arg:", configDir)
|
||||
configFiles = append(configFiles, readConfDir(configDir)...)
|
||||
|
||||
extension, err := getLoaderExtension()
|
||||
if err != nil {
|
||||
base.Fatalf(err.Error())
|
||||
}
|
||||
|
||||
if len(configDirs) > 0 {
|
||||
dirReader := readConfDir
|
||||
if *configDirRecursively {
|
||||
dirReader = readConfDirRecursively
|
||||
}
|
||||
for _, d := range configDirs {
|
||||
log.Println("Using confdir from arg:", d)
|
||||
configFiles = append(configFiles, dirReader(d, extension)...)
|
||||
}
|
||||
}
|
||||
if len(configFiles) == 0 {
|
||||
cmd.Flag.Usage()
|
||||
base.SetExitStatus(1)
|
||||
base.Exit()
|
||||
if len(configDirs) == 0 {
|
||||
cmd.Flag.Usage()
|
||||
base.SetExitStatus(1)
|
||||
base.Exit()
|
||||
}
|
||||
base.Fatalf("no config file found with extension: %s", extension)
|
||||
}
|
||||
printVersion()
|
||||
_, err := startV2RayTesting()
|
||||
_, err = startV2RayTesting()
|
||||
if err != nil {
|
||||
base.Fatalf("Test failed: %s", err)
|
||||
}
|
||||
@ -62,7 +76,7 @@ func executeTest(cmd *base.Command, args []string) {
|
||||
}
|
||||
|
||||
func startV2RayTesting() (core.Server, error) {
|
||||
config, err := core.LoadConfig(getFormatFromAlias(), configFiles[0], configFiles)
|
||||
config, err := core.LoadConfig(*configFormat, configFiles[0], configFiles)
|
||||
if err != nil {
|
||||
return nil, newError("failed to read config files: [", configFiles.String(), "]").Base(err)
|
||||
}
|
||||
|
@ -11,7 +11,7 @@ import (
|
||||
var CmdVersion = &base.Command{
|
||||
UsageLine: "{{.Exec}} version",
|
||||
Short: "Print V2Ray Versions",
|
||||
Long: `Version prints the build information for V2Ray executables.
|
||||
Long: `Prints the build information for V2Ray.
|
||||
`,
|
||||
Run: executeVersion,
|
||||
}
|
||||
|
@ -14,8 +14,8 @@ import (
|
||||
|
||||
func init() {
|
||||
common.Must(core.RegisterConfigLoader(&core.ConfigFormat{
|
||||
Name: "JSON",
|
||||
Extension: []string{"json"},
|
||||
Name: []string{"JSON"},
|
||||
Extension: []string{".json", ".jsonc"},
|
||||
Loader: func(input interface{}) (*core.Config, error) {
|
||||
switch v := input.(type) {
|
||||
case cmdarg.Arg:
|
||||
|
@ -1,37 +1,31 @@
|
||||
package jsonem
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
|
||||
core "github.com/v2fly/v2ray-core/v4"
|
||||
"github.com/v2fly/v2ray-core/v4/common"
|
||||
"github.com/v2fly/v2ray-core/v4/common/cmdarg"
|
||||
"github.com/v2fly/v2ray-core/v4/infra/conf"
|
||||
"github.com/v2fly/v2ray-core/v4/infra/conf/merge"
|
||||
"github.com/v2fly/v2ray-core/v4/infra/conf/serial"
|
||||
"github.com/v2fly/v2ray-core/v4/main/confloader"
|
||||
)
|
||||
|
||||
func init() {
|
||||
common.Must(core.RegisterConfigLoader(&core.ConfigFormat{
|
||||
Name: "JSON",
|
||||
Extension: []string{"json"},
|
||||
Name: []string{"JSON"},
|
||||
Extension: []string{".json", ".jsonc"},
|
||||
Loader: func(input interface{}) (*core.Config, error) {
|
||||
switch v := input.(type) {
|
||||
case cmdarg.Arg:
|
||||
cf := &conf.Config{}
|
||||
for i, arg := range v {
|
||||
newError("Reading config: ", arg).AtInfo().WriteToLog()
|
||||
r, err := confloader.LoadConfig(arg)
|
||||
common.Must(err)
|
||||
c, err := serial.DecodeJSONConfig(r)
|
||||
common.Must(err)
|
||||
if i == 0 {
|
||||
// This ensure even if the muti-json parser do not support a setting,
|
||||
// It is still respected automatically for the first configure file
|
||||
*cf = *c
|
||||
continue
|
||||
}
|
||||
cf.Override(c, arg)
|
||||
data, err := merge.FilesToJSON(v)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
r := bytes.NewReader(data)
|
||||
cf, err := serial.DecodeJSONConfig(r)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return cf.Build()
|
||||
case io.Reader:
|
||||
|
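With the JSON loader above now delegating to merge.FilesToJSON, loading several JSON files through core.LoadConfig boils down to the sketch below. File names are illustrative, and the jsonem import path is assumed from the package name shown above:

    package main

    import (
        core "github.com/v2fly/v2ray-core/v4"
        "github.com/v2fly/v2ray-core/v4/common/cmdarg"
        _ "github.com/v2fly/v2ray-core/v4/main/jsonem" // assumed path; registers the JSON loader above
    )

    func main() {
        files := cmdarg.Arg{"c1.json", "c2.json"}
        // All files are merged by merge.FilesToJSON before decoding, so the
        // "json-merge" rules decide how overlapping fields are combined.
        cfg, err := core.LoadConfig("json", files[0], files)
        if err != nil {
            panic(err)
        }
        _ = cfg
    }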