⚡ Improve app launch speed
This commit is contained in:
@@ -107,33 +107,38 @@ const clangParser: Parser<string> = {
|
||||
locEnd: (node: string) => node.length,
|
||||
};
|
||||
|
||||
// Initialize clang-format WASM module
|
||||
// Lazy initialize clang-format WASM module
|
||||
let initPromise: Promise<void> | null = null;
|
||||
let isInitialized = false;
|
||||
|
||||
function initClangFormat(): Promise<void> {
|
||||
if (initPromise) {
|
||||
return initPromise;
|
||||
if (isInitialized) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
initPromise = (async () => {
|
||||
if (!isInitialized) {
|
||||
await clangFormatInit();
|
||||
isInitialized = true;
|
||||
}
|
||||
})();
|
||||
if (!initPromise) {
|
||||
initPromise = (async () => {
|
||||
try {
|
||||
await clangFormatInit();
|
||||
isInitialized = true;
|
||||
} catch (error) {
|
||||
console.warn('Failed to initialize clang-format WASM module:', error);
|
||||
initPromise = null;
|
||||
throw error;
|
||||
}
|
||||
})();
|
||||
}
|
||||
|
||||
return initPromise;
|
||||
}
|
||||
|
||||
// Printer configuration
|
||||
const clangPrinter: Printer<string> = {
|
||||
print: (path, options) => {
|
||||
// @ts-expect-error -- Support async printer like shell plugin
|
||||
async print(path, options) {
|
||||
try {
|
||||
if (!isInitialized) {
|
||||
console.warn('clang-format WASM module not initialized, returning original text');
|
||||
return (path as any).getValue ? (path as any).getValue() : path.node;
|
||||
}
|
||||
// Wait for initialization to complete
|
||||
await initClangFormat();
|
||||
|
||||
const text = (path as any).getValue ? (path as any).getValue() : path.node;
|
||||
const style = getClangStyle(options);
|
||||
@@ -205,11 +210,6 @@ const clangPlugin: Plugin = {
|
||||
...options,
|
||||
};
|
||||
|
||||
// Initialize WASM module when plugin loads
|
||||
initClangFormat().catch(error => {
|
||||
console.warn('Failed to initialize clang-format WASM module:', error);
|
||||
});
|
||||
|
||||
export default clangPlugin;
|
||||
export { languages };
|
||||
export const parsers = clangPlugin.parsers;
|
||||
|
||||
@@ -33,33 +33,38 @@ const dartParser: Parser<string> = {
|
||||
locEnd: (node: string) => node.length,
|
||||
};
|
||||
|
||||
// Initialize Dart WASM module
|
||||
// Lazy initialize Dart WASM module
|
||||
let initPromise: Promise<void> | null = null;
|
||||
let isInitialized = false;
|
||||
|
||||
function initDart(): Promise<void> {
|
||||
if (initPromise) {
|
||||
return initPromise;
|
||||
if (isInitialized) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
initPromise = (async () => {
|
||||
if (!isInitialized) {
|
||||
await dartInit();
|
||||
isInitialized = true;
|
||||
}
|
||||
})();
|
||||
if (!initPromise) {
|
||||
initPromise = (async () => {
|
||||
try {
|
||||
await dartInit();
|
||||
isInitialized = true;
|
||||
} catch (error) {
|
||||
console.warn('Failed to initialize Dart WASM module:', error);
|
||||
initPromise = null;
|
||||
throw error;
|
||||
}
|
||||
})();
|
||||
}
|
||||
|
||||
return initPromise;
|
||||
}
|
||||
|
||||
// Printer configuration
|
||||
const dartPrinter: Printer<string> = {
|
||||
print: (path, options) => {
|
||||
// @ts-expect-error -- Support async printer like shell plugin
|
||||
async print(path, options) {
|
||||
try {
|
||||
if (!isInitialized) {
|
||||
console.warn('Dart WASM module not initialized, returning original text');
|
||||
return (path as any).getValue ? (path as any).getValue() : path.node;
|
||||
}
|
||||
// Wait for initialization to complete
|
||||
await initDart();
|
||||
|
||||
const text = (path as any).getValue ? (path as any).getValue() : path.node;
|
||||
const config = getDartConfig(options);
|
||||
@@ -121,11 +126,6 @@ const dartPlugin: Plugin = {
|
||||
options,
|
||||
};
|
||||
|
||||
// Initialize WASM module when plugin loads
|
||||
initDart().catch(error => {
|
||||
console.warn('Failed to initialize Dart WASM module:', error);
|
||||
});
|
||||
|
||||
export default dartPlugin;
|
||||
export { languages };
|
||||
export const parsers = dartPlugin.parsers;
|
||||
|
||||
Binary file not shown.
10
frontend/src/common/prettier/plugins/go/go.d.ts
vendored
10
frontend/src/common/prettier/plugins/go/go.d.ts
vendored
@@ -1,10 +0,0 @@
|
||||
import { Parser, Plugin } from "prettier";
|
||||
|
||||
export declare const languages: Plugin["languages"];
|
||||
export declare const parsers: {
|
||||
go: Parser;
|
||||
};
|
||||
export declare const printers: Plugin["printers"];
|
||||
|
||||
declare const plugin: Plugin;
|
||||
export default plugin;
|
||||
@@ -1,142 +0,0 @@
|
||||
/**
|
||||
* @fileoverview Go Prettier Format Plugin
|
||||
* A Prettier plugin for formatting Go code using WebAssembly.
|
||||
* This plugin leverages Go's native formatting capabilities through WASM.
|
||||
*/
|
||||
import "./wasm_exec.js"
|
||||
/** @type {Promise<void>|null} */
|
||||
let initializePromise;
|
||||
|
||||
/**
|
||||
* Initializes the Go WebAssembly module for formatting Go code.
|
||||
* This function sets up the WASM runtime and makes the formatGo function
|
||||
* available on the global object.
|
||||
*
|
||||
* @async
|
||||
* @function initialize
|
||||
* @returns {Promise<void>} A promise that resolves when the WASM module is ready
|
||||
* @throws {Error} If the WASM file cannot be loaded or instantiated
|
||||
*/
|
||||
function initialize() {
|
||||
if (initializePromise) {
|
||||
return initializePromise;
|
||||
}
|
||||
|
||||
initializePromise = (async () => {
|
||||
|
||||
const go = new TinyGo();
|
||||
|
||||
// Load WASM file from browser
|
||||
const response = await fetch('./go-format.wasm');
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to load WASM file: ${response.status} ${response.statusText}`);
|
||||
}
|
||||
const wasmBuffer = await response.arrayBuffer();
|
||||
|
||||
const { instance } = await WebAssembly.instantiate(
|
||||
wasmBuffer,
|
||||
go.importObject
|
||||
);
|
||||
|
||||
// go.run returns a promise that resolves when the go program exits.
|
||||
// Since our program is a long-running service (it exposes a function and waits),
|
||||
// we don't await it.
|
||||
go.run(instance);
|
||||
|
||||
// The `formatGo` function is now available on the global object.
|
||||
})();
|
||||
|
||||
return initializePromise;
|
||||
}
|
||||
|
||||
/**
|
||||
* Prettier language configuration for Go.
|
||||
* Defines the language settings, file extensions, and parser mappings.
|
||||
*
|
||||
* @type {Array<Object>}
|
||||
* @property {string} name - The display name of the language
|
||||
* @property {string[]} parsers - Array of parser names for this language
|
||||
* @property {string[]} extensions - File extensions associated with this language
|
||||
* @property {string[]} vscodeLanguageIds - VSCode language identifier mappings
|
||||
*/
|
||||
const languages = [
|
||||
{
|
||||
name: "Go",
|
||||
parsers: ["go-format"],
|
||||
extensions: [".go"],
|
||||
vscodeLanguageIds: ["go"],
|
||||
},
|
||||
];
|
||||
|
||||
/**
|
||||
* Prettier parser configuration for Go.
|
||||
* Defines how Go source code should be parsed and processed.
|
||||
*
|
||||
* @type {Object<string, Object>}
|
||||
* @property {Object} go-format - Go language parser configuration
|
||||
* @property {Function} go-format.parse - Parser function that returns the input text as-is
|
||||
* @property {string} go-format.astFormat - AST format identifier for the printer
|
||||
* @property {Function} go-format.locStart - Function to get the start location of a node
|
||||
* @property {Function} go-format.locEnd - Function to get the end location of a node
|
||||
*/
|
||||
const parsers = {
|
||||
"go-format": {
|
||||
/**
|
||||
* Parse Go source code. For this plugin, we pass through the text as-is
|
||||
* since the actual formatting is handled by the Go WASM module.
|
||||
*
|
||||
* @param {string} text - The Go source code to parse
|
||||
* @returns {string} The input text unchanged
|
||||
*/
|
||||
parse: (text) => text,
|
||||
astFormat: "go-format",
|
||||
// These are required for Prettier to work
|
||||
/**
|
||||
* Get the start location of a node in the source code.
|
||||
*
|
||||
* @param {string} node - The node (in this case, the source text)
|
||||
* @returns {number} Always returns 0 as we treat the entire text as one node
|
||||
*/
|
||||
locStart: (node) => 0,
|
||||
/**
|
||||
* Get the end location of a node in the source code.
|
||||
*
|
||||
* @param {string} node - The node (in this case, the source text)
|
||||
* @returns {number} The length of the text
|
||||
*/
|
||||
locEnd: (node) => node.length,
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
* Prettier printer configuration for Go.
|
||||
* Defines how the parsed Go AST should be formatted back to text.
|
||||
*
|
||||
* @type {Object<string, Object>}
|
||||
* @property {Object} go-format - Go formatting printer configuration
|
||||
* @property {Function} go-format.print - Async function that formats Go code
|
||||
*/
|
||||
const printers = {
|
||||
"go-format": {
|
||||
/**
|
||||
* Format Go source code using the WebAssembly Go formatter.
|
||||
* This function initializes the WASM module if needed and calls the
|
||||
* global formatGo function exposed by the Go program.
|
||||
*
|
||||
* @async
|
||||
* @param {Object} path - Prettier's path object containing the source code
|
||||
* @param {Function} path.getValue - Function to get the current node value
|
||||
* @returns {Promise<string>} The formatted Go source code
|
||||
* @throws {Error} If the WASM module fails to initialize or format the code
|
||||
*/
|
||||
print: async (path) => {
|
||||
// The WASM module must be initialized before we can format.
|
||||
await initialize();
|
||||
const text = path.getValue();
|
||||
// The `formatGo` function is exposed on the global object by our Go program.
|
||||
return globalThis.formatGo(text);
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export default { languages, parsers, printers, initialize };
|
||||
10
frontend/src/common/prettier/plugins/go/gofmt.d.ts
vendored
Normal file
10
frontend/src/common/prettier/plugins/go/gofmt.d.ts
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
export type InitInput =
|
||||
| RequestInfo
|
||||
| URL
|
||||
| Response
|
||||
| BufferSource
|
||||
| WebAssembly.Module;
|
||||
|
||||
export default function initAsync(wasm_url?: InitInput): Promise<void>;
|
||||
export declare function initSync(module: BufferSource | WebAssembly.Module): void;
|
||||
export declare function format(input: string): string;
|
||||
355
frontend/src/common/prettier/plugins/go/gofmt.js
Normal file
355
frontend/src/common/prettier/plugins/go/gofmt.js
Normal file
@@ -0,0 +1,355 @@
|
||||
// Copyright 2018 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
//
|
||||
// This file has been modified for use by the TinyGo compiler.
|
||||
|
||||
const encoder = new TextEncoder("utf-8");
|
||||
const decoder = new TextDecoder("utf-8");
|
||||
let reinterpretBuf = new DataView(new ArrayBuffer(8));
|
||||
var logLine = [];
|
||||
|
||||
class TinyGo {
|
||||
constructor() {
|
||||
this._callbackTimeouts = new Map();
|
||||
this._nextCallbackTimeoutID = 1;
|
||||
|
||||
const mem = () => {
|
||||
// The buffer may change when requesting more memory.
|
||||
return new DataView(this._inst.exports.memory.buffer);
|
||||
}
|
||||
|
||||
const unboxValue = (v_ref) => {
|
||||
reinterpretBuf.setBigInt64(0, v_ref, true);
|
||||
const f = reinterpretBuf.getFloat64(0, true);
|
||||
if (f === 0) {
|
||||
return undefined;
|
||||
}
|
||||
if (!isNaN(f)) {
|
||||
return f;
|
||||
}
|
||||
|
||||
const id = v_ref & 0xffffffffn;
|
||||
return this._values[id];
|
||||
}
|
||||
|
||||
|
||||
const loadValue = (addr) => {
|
||||
let v_ref = mem().getBigUint64(addr, true);
|
||||
return unboxValue(v_ref);
|
||||
}
|
||||
|
||||
const boxValue = (v) => {
|
||||
const nanHead = 0x7FF80000n;
|
||||
|
||||
if (typeof v === "number") {
|
||||
if (isNaN(v)) {
|
||||
return nanHead << 32n;
|
||||
}
|
||||
if (v === 0) {
|
||||
return (nanHead << 32n) | 1n;
|
||||
}
|
||||
reinterpretBuf.setFloat64(0, v, true);
|
||||
return reinterpretBuf.getBigInt64(0, true);
|
||||
}
|
||||
|
||||
switch (v) {
|
||||
case undefined:
|
||||
return 0n;
|
||||
case null:
|
||||
return (nanHead << 32n) | 2n;
|
||||
case true:
|
||||
return (nanHead << 32n) | 3n;
|
||||
case false:
|
||||
return (nanHead << 32n) | 4n;
|
||||
}
|
||||
|
||||
let id = this._ids.get(v);
|
||||
if (id === undefined) {
|
||||
id = this._idPool.pop();
|
||||
if (id === undefined) {
|
||||
id = BigInt(this._values.length);
|
||||
}
|
||||
this._values[id] = v;
|
||||
this._goRefCounts[id] = 0;
|
||||
this._ids.set(v, id);
|
||||
}
|
||||
this._goRefCounts[id]++;
|
||||
let typeFlag = 1n;
|
||||
switch (typeof v) {
|
||||
case "string":
|
||||
typeFlag = 2n;
|
||||
break;
|
||||
case "symbol":
|
||||
typeFlag = 3n;
|
||||
break;
|
||||
case "function":
|
||||
typeFlag = 4n;
|
||||
break;
|
||||
}
|
||||
return id | ((nanHead | typeFlag) << 32n);
|
||||
}
|
||||
|
||||
const storeValue = (addr, v) => {
|
||||
let v_ref = boxValue(v);
|
||||
mem().setBigUint64(addr, v_ref, true);
|
||||
}
|
||||
|
||||
const loadSlice = (array, len, cap) => {
|
||||
return new Uint8Array(this._inst.exports.memory.buffer, array, len);
|
||||
}
|
||||
|
||||
const loadSliceOfValues = (array, len, cap) => {
|
||||
const a = new Array(len);
|
||||
for (let i = 0; i < len; i++) {
|
||||
a[i] = loadValue(array + i * 8);
|
||||
}
|
||||
return a;
|
||||
}
|
||||
|
||||
const loadString = (ptr, len) => {
|
||||
return decoder.decode(new DataView(this._inst.exports.memory.buffer, ptr, len));
|
||||
}
|
||||
|
||||
const timeOrigin = Date.now() - performance.now();
|
||||
this.importObject = {
|
||||
wasi_snapshot_preview1: {
|
||||
// https://github.com/WebAssembly/WASI/blob/main/phases/snapshot/docs.md#fd_write
|
||||
fd_write: () => 0, // dummy
|
||||
},
|
||||
gojs: {
|
||||
// func ticks() float64
|
||||
"runtime.ticks": () => {
|
||||
return timeOrigin + performance.now();
|
||||
},
|
||||
|
||||
// func finalizeRef(v ref)
|
||||
"syscall/js.finalizeRef": (v_ref) => {
|
||||
reinterpretBuf.setBigInt64(0, v_ref, true);
|
||||
const f = reinterpretBuf.getFloat64(0, true);
|
||||
if (f === 0 || !isNaN(f)) {
|
||||
return;
|
||||
}
|
||||
const id = v_ref & 0xffffffffn;
|
||||
this._goRefCounts[id]--;
|
||||
if (this._goRefCounts[id] === 0) {
|
||||
const v = this._values[id];
|
||||
this._values[id] = null;
|
||||
this._ids.delete(v);
|
||||
this._idPool.push(id);
|
||||
}
|
||||
},
|
||||
|
||||
// func stringVal(value string) ref
|
||||
"syscall/js.stringVal": (value_ptr, value_len) => {
|
||||
const s = loadString(value_ptr, value_len);
|
||||
return boxValue(s);
|
||||
},
|
||||
|
||||
// func valueGet(v ref, p string) ref
|
||||
"syscall/js.valueGet": (v_ref, p_ptr, p_len) => {
|
||||
let prop = loadString(p_ptr, p_len);
|
||||
let v = unboxValue(v_ref);
|
||||
let result = Reflect.get(v, prop);
|
||||
return boxValue(result);
|
||||
},
|
||||
|
||||
// func valueSet(v ref, p string, x ref)
|
||||
"syscall/js.valueSet": (v_ref, p_ptr, p_len, x_ref) => {
|
||||
const v = unboxValue(v_ref);
|
||||
const p = loadString(p_ptr, p_len);
|
||||
const x = unboxValue(x_ref);
|
||||
Reflect.set(v, p, x);
|
||||
},
|
||||
|
||||
// func valueIndex(v ref, i int) ref
|
||||
"syscall/js.valueIndex": (v_ref, i) => {
|
||||
return boxValue(Reflect.get(unboxValue(v_ref), i));
|
||||
},
|
||||
|
||||
// valueSetIndex(v ref, i int, x ref)
|
||||
"syscall/js.valueSetIndex": (v_ref, i, x_ref) => {
|
||||
Reflect.set(unboxValue(v_ref), i, unboxValue(x_ref));
|
||||
},
|
||||
|
||||
// func valueCall(v ref, m string, args []ref) (ref, bool)
|
||||
"syscall/js.valueCall": (ret_addr, v_ref, m_ptr, m_len, args_ptr, args_len, args_cap) => {
|
||||
const v = unboxValue(v_ref);
|
||||
const name = loadString(m_ptr, m_len);
|
||||
const args = loadSliceOfValues(args_ptr, args_len, args_cap);
|
||||
try {
|
||||
const m = Reflect.get(v, name);
|
||||
storeValue(ret_addr, Reflect.apply(m, v, args));
|
||||
mem().setUint8(ret_addr + 8, 1);
|
||||
} catch (err) {
|
||||
storeValue(ret_addr, err);
|
||||
mem().setUint8(ret_addr + 8, 0);
|
||||
}
|
||||
},
|
||||
|
||||
// func valueNew(v ref, args []ref) (ref, bool)
|
||||
"syscall/js.valueNew": (ret_addr, v_ref, args_ptr, args_len, args_cap) => {
|
||||
const v = unboxValue(v_ref);
|
||||
const args = loadSliceOfValues(args_ptr, args_len, args_cap);
|
||||
try {
|
||||
storeValue(ret_addr, Reflect.construct(v, args));
|
||||
mem().setUint8(ret_addr + 8, 1);
|
||||
} catch (err) {
|
||||
storeValue(ret_addr, err);
|
||||
mem().setUint8(ret_addr+ 8, 0);
|
||||
}
|
||||
},
|
||||
|
||||
// func valueLength(v ref) int
|
||||
"syscall/js.valueLength": (v_ref) => {
|
||||
return unboxValue(v_ref).length;
|
||||
},
|
||||
|
||||
// valuePrepareString(v ref) (ref, int)
|
||||
"syscall/js.valuePrepareString": (ret_addr, v_ref) => {
|
||||
const s = String(unboxValue(v_ref));
|
||||
const str = encoder.encode(s);
|
||||
storeValue(ret_addr, str);
|
||||
mem().setInt32(ret_addr + 8, str.length, true);
|
||||
},
|
||||
|
||||
// valueLoadString(v ref, b []byte)
|
||||
"syscall/js.valueLoadString": (v_ref, slice_ptr, slice_len, slice_cap) => {
|
||||
const str = unboxValue(v_ref);
|
||||
loadSlice(slice_ptr, slice_len, slice_cap).set(str);
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
// Go 1.20 uses 'env'. Go 1.21 uses 'gojs'.
|
||||
// For compatibility, we use both as long as Go 1.20 is supported.
|
||||
this.importObject.env = this.importObject.gojs;
|
||||
}
|
||||
|
||||
async run(instance) {
|
||||
this._inst = instance;
|
||||
this._values = [ // JS values that Go currently has references to, indexed by reference id
|
||||
NaN,
|
||||
0,
|
||||
null,
|
||||
true,
|
||||
false,
|
||||
// fake global
|
||||
{
|
||||
set format(fn){ instance.format = fn; },
|
||||
Array,
|
||||
Object,
|
||||
},
|
||||
this,
|
||||
];
|
||||
this._goRefCounts = []; // number of references that Go has to a JS value, indexed by reference id
|
||||
this._ids = new Map(); // mapping from JS values to reference ids
|
||||
this._idPool = []; // unused ids that have been garbage collected
|
||||
this.exited = false; // whether the Go program has exited
|
||||
|
||||
while (true) {
|
||||
const callbackPromise = new Promise((resolve) => {
|
||||
this._resolveCallbackPromise = () => {
|
||||
if (this.exited) {
|
||||
throw new Error("bad callback: Go program has already exited");
|
||||
}
|
||||
setTimeout(resolve, 0); // make sure it is asynchronous
|
||||
};
|
||||
});
|
||||
this._inst.exports._start();
|
||||
if (this.exited) {
|
||||
break;
|
||||
}
|
||||
await callbackPromise;
|
||||
}
|
||||
}
|
||||
|
||||
_resume() {
|
||||
if (this.exited) {
|
||||
throw new Error("Go program has already exited");
|
||||
}
|
||||
this._inst.exports.resume();
|
||||
if (this.exited) {
|
||||
this._resolveExitPromise();
|
||||
}
|
||||
}
|
||||
|
||||
_makeFuncWrapper(id) {
|
||||
const go = this;
|
||||
return function () {
|
||||
const event = { id: id, this: this, args: arguments };
|
||||
go._pendingEvent = event;
|
||||
go._resume();
|
||||
return event.result;
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* ================== End of wasm_exec.js ==================
|
||||
*/
|
||||
/**/let wasm;
|
||||
/**/async function __load(module, imports) {
|
||||
/**/ if (typeof Response === 'function' && module instanceof Response) {
|
||||
/**/ if (typeof WebAssembly.instantiateStreaming === 'function') {
|
||||
/**/ try { return await WebAssembly.instantiateStreaming(module, imports); }
|
||||
/**/ catch (e) {
|
||||
/**/ if (module.headers.get('Content-Type') != 'application/wasm') {
|
||||
/**/ console.warn("`WebAssembly.instantiateStreaming` failed because your server does not serve wasm with `application/wasm` MIME type. Falling back to `WebAssembly.instantiate` which is slower. Original error:\n", e);
|
||||
/**/ } else { throw e; }
|
||||
/**/ }
|
||||
/**/ }
|
||||
/**/ const bytes = await module.arrayBuffer();
|
||||
/**/ return await WebAssembly.instantiate(bytes, imports);
|
||||
/**/ } else {
|
||||
/**/ const instance = await WebAssembly.instantiate(module, imports);
|
||||
/**/ if (instance instanceof WebAssembly.Instance) return { instance, module };
|
||||
/**/ else return instance;
|
||||
/**/ }
|
||||
/**/}
|
||||
/**/function __finalize_init(instance) {
|
||||
/**/ return wasm = instance;
|
||||
/**/}
|
||||
/**/function __init_memory(imports, maybe_memory) { }
|
||||
/**/export function initSync(module) {
|
||||
/**/ if (wasm !== undefined) return wasm;
|
||||
/**/
|
||||
/**/ const go = new TinyGo();
|
||||
/**/ const imports = go.importObject;
|
||||
/**/
|
||||
/**/ __init_memory(imports);
|
||||
/**/
|
||||
/**/ if (!(module instanceof WebAssembly.Module)) module = new WebAssembly.Module(module);
|
||||
/**/
|
||||
/**/ const instance = new WebAssembly.Instance(module, imports);
|
||||
/**/
|
||||
/**/ go.run(instance);
|
||||
/**/ return __finalize_init(instance, module);
|
||||
/**/}
|
||||
/**/export default async function initAsync(input) {
|
||||
/**/ if (wasm !== undefined) return wasm;
|
||||
/**/
|
||||
/**/ if (typeof input === 'undefined') input = new URL('gofmt.wasm', import.meta.url);
|
||||
/**/
|
||||
/**/ const go = new TinyGo();
|
||||
/**/ const imports = go.importObject;
|
||||
/**/
|
||||
/**/ if (typeof input === 'string' || (typeof Request === 'function' && input instanceof Request) || (typeof URL === 'function' && input instanceof URL)) {
|
||||
/**/ input = fetch(input);
|
||||
/**/ }
|
||||
/**/
|
||||
/**/ __init_memory(imports);
|
||||
/**/
|
||||
/**/ const { instance, module } = await __load(await input, imports);
|
||||
/**/
|
||||
/**/ go.run(instance);
|
||||
/**/ return __finalize_init(instance, module);
|
||||
/**/}
|
||||
/**/export function format(input) {
|
||||
/**/ const [err, result] = wasm.format(input);
|
||||
/**/ if (err) {
|
||||
/**/ throw new Error(result);
|
||||
/**/ }
|
||||
/**/ return result;
|
||||
/**/}
|
||||
/**/
|
||||
BIN
frontend/src/common/prettier/plugins/go/gofmt.wasm
Normal file
BIN
frontend/src/common/prettier/plugins/go/gofmt.wasm
Normal file
Binary file not shown.
10
frontend/src/common/prettier/plugins/go/gofmt_node.js
Normal file
10
frontend/src/common/prettier/plugins/go/gofmt_node.js
Normal file
@@ -0,0 +1,10 @@
|
||||
import fs from "node:fs/promises";
|
||||
import initAsync from "./gofmt.js";
|
||||
|
||||
const wasm = new URL("./gofmt.wasm", import.meta.url);
|
||||
|
||||
export default function (init = fs.readFile(wasm)) {
|
||||
return initAsync(init);
|
||||
}
|
||||
|
||||
export * from "./gofmt.js";
|
||||
8
frontend/src/common/prettier/plugins/go/gofmt_vite.js
Normal file
8
frontend/src/common/prettier/plugins/go/gofmt_vite.js
Normal file
@@ -0,0 +1,8 @@
|
||||
import initAsync from "./gofmt.js";
|
||||
import wasm_url from "./gofmt.wasm?url";
|
||||
|
||||
export default function (input = wasm_url) {
|
||||
return initAsync(input);
|
||||
}
|
||||
|
||||
export * from "./gofmt.js";
|
||||
101
frontend/src/common/prettier/plugins/go/index.ts
Normal file
101
frontend/src/common/prettier/plugins/go/index.ts
Normal file
@@ -0,0 +1,101 @@
|
||||
/**
|
||||
* Prettier Plugin for Go formatting using gofmt WebAssembly
|
||||
*
|
||||
* This plugin provides support for formatting Go files using the gofmt WASM implementation.
|
||||
*/
|
||||
import type { Plugin, Parser, Printer } from 'prettier';
|
||||
|
||||
// Import the gofmt WASM module
|
||||
import gofmtInit, { format } from './gofmt_vite.js';
|
||||
|
||||
const parserName = 'go';
|
||||
|
||||
// Language configuration
|
||||
const languages = [
|
||||
{
|
||||
name: 'Go',
|
||||
aliases: ['go', 'golang'],
|
||||
parsers: [parserName],
|
||||
extensions: ['.go'],
|
||||
aceMode: 'golang',
|
||||
tmScope: 'source.go',
|
||||
linguistLanguageId: 132,
|
||||
vscodeLanguageIds: ['go']
|
||||
}
|
||||
];
|
||||
|
||||
// Parser configuration
|
||||
const goParser: Parser<string> = {
|
||||
astFormat: parserName,
|
||||
parse: (text: string) => text,
|
||||
locStart: () => 0,
|
||||
locEnd: (node: string) => node.length,
|
||||
};
|
||||
|
||||
// Lazy initialize gofmt WASM module
|
||||
let initPromise: Promise<void> | null = null;
|
||||
let isInitialized = false;
|
||||
|
||||
function initGofmt(): Promise<void> {
|
||||
if (isInitialized) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
if (!initPromise) {
|
||||
initPromise = (async () => {
|
||||
try {
|
||||
await gofmtInit();
|
||||
isInitialized = true;
|
||||
} catch (error) {
|
||||
console.warn('Failed to initialize gofmt WASM module:', error);
|
||||
initPromise = null;
|
||||
throw error;
|
||||
}
|
||||
})();
|
||||
}
|
||||
|
||||
return initPromise;
|
||||
}
|
||||
|
||||
// Printer configuration
|
||||
const goPrinter: Printer<string> = {
|
||||
// @ts-expect-error -- Support async printer like shell plugin
|
||||
async print(path, options) {
|
||||
try {
|
||||
// Wait for initialization to complete
|
||||
await initGofmt();
|
||||
|
||||
const text = (path as any).getValue ? (path as any).getValue() : path.node;
|
||||
|
||||
// Format using gofmt (synchronous call)
|
||||
const formatted = format(text);
|
||||
|
||||
return formatted.trim();
|
||||
} catch (error) {
|
||||
console.warn('Go formatting failed:', error);
|
||||
// Return original text if formatting fails
|
||||
return (path as any).getValue ? (path as any).getValue() : path.node;
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
// Plugin options (Go doesn't need additional config options)
|
||||
const options = {};
|
||||
|
||||
// Plugin definition
|
||||
const goPlugin: Plugin = {
|
||||
languages,
|
||||
parsers: {
|
||||
[parserName]: goParser,
|
||||
},
|
||||
printers: {
|
||||
[parserName]: goPrinter,
|
||||
},
|
||||
options,
|
||||
};
|
||||
|
||||
// Export plugin without auto-initialization
|
||||
export default goPlugin;
|
||||
export { languages, initGofmt as initialize };
|
||||
export const parsers = goPlugin.parsers;
|
||||
export const printers = goPlugin.printers;
|
||||
@@ -1,31 +0,0 @@
|
||||
@echo off
|
||||
rem Build script for Go Prettier Plugin WASM using TinyGo
|
||||
rem This script compiles the Go code to WebAssembly for browser environment
|
||||
|
||||
echo Building Go Prettier Plugin WASM with TinyGo...
|
||||
|
||||
rem Check if TinyGo is available
|
||||
tinygo version >nul 2>&1
|
||||
if errorlevel 1 (
|
||||
echo TinyGo not found! Please install TinyGo first.
|
||||
echo Visit: https://tinygo.org/getting-started/install/
|
||||
pause
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
rem Display TinyGo version
|
||||
echo Using TinyGo version:
|
||||
tinygo version
|
||||
|
||||
rem Build the WASM file using TinyGo
|
||||
echo Compiling main.go to go.wasm with TinyGo...
|
||||
tinygo build -o go-format.wasm -target wasm main.go
|
||||
if errorlevel 1 (
|
||||
echo Build failed!
|
||||
pause
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
echo Build successful!
|
||||
|
||||
echo Go Prettier Plugin WASM (TinyGo) is ready!
|
||||
@@ -1,29 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Build script for Go Prettier Plugin WASM using TinyGo
|
||||
# This script compiles the Go code to WebAssembly for browser environment
|
||||
|
||||
echo "Building Go Prettier Plugin WASM with TinyGo..."
|
||||
|
||||
# Check if TinyGo is available
|
||||
if ! command -v tinygo &> /dev/null; then
|
||||
echo "TinyGo not found! Please install TinyGo first."
|
||||
echo "Visit: https://tinygo.org/getting-started/install/"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Display TinyGo version
|
||||
echo "Using TinyGo version: $(tinygo version)"
|
||||
|
||||
# Build the WASM file using TinyGo
|
||||
echo "Compiling main.go to go.wasm with TinyGo..."
|
||||
tinygo build -o go-format.wasm -target wasm main.go
|
||||
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "Build failed!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Build successful!"
|
||||
|
||||
echo "Go Prettier Plugin WASM (TinyGo) is ready!"
|
||||
@@ -1,32 +0,0 @@
|
||||
@echo off
|
||||
rem Build script for Go Prettier Plugin WASM using native Go
|
||||
rem This script compiles the Go code to WebAssembly for browser environment
|
||||
|
||||
echo Building Go Prettier Plugin WASM with native Go...
|
||||
|
||||
rem Check if Go is available
|
||||
go version >nul 2>&1
|
||||
if %ERRORLEVEL% NEQ 0 (
|
||||
echo Go not found! Please install Go 1.21+ first.
|
||||
echo Visit: https://golang.org/dl/
|
||||
pause
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
rem Set WASM build environment for browser (js/wasm)
|
||||
set GOOS=js
|
||||
set GOARCH=wasm
|
||||
|
||||
rem Build the WASM file using native Go
|
||||
echo Compiling main.go to go.wasm with Go...
|
||||
go build -o go-format.wasm main.go
|
||||
|
||||
if %ERRORLEVEL% EQU 0 (
|
||||
echo Build successful!
|
||||
|
||||
echo Go Prettier Plugin WASM is ready!
|
||||
) else (
|
||||
echo Build failed!
|
||||
pause
|
||||
exit /b 1
|
||||
)
|
||||
@@ -1,33 +1,9 @@
|
||||
#!/bin/bash
|
||||
set -Eeo pipefail
|
||||
|
||||
# Build script for Go Prettier Plugin WASM using native Go
|
||||
# This script compiles the Go code to WebAssembly for browser environment
|
||||
cd $(dirname $0)
|
||||
|
||||
echo "Building Go Prettier Plugin WASM with native Go..."
|
||||
echo "Building..."
|
||||
tinygo build -o=../gofmt.wasm -target=wasm -no-debug ./lib.go
|
||||
|
||||
# Check if Go is available
|
||||
if ! command -v go &> /dev/null; then
|
||||
echo "Go not found! Please install Go 1.21+ first."
|
||||
echo "Visit: https://golang.org/dl/"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Display Go version
|
||||
echo "Using Go version: $(go version)"
|
||||
|
||||
# Set WASM build environment for browser (js/wasm)
|
||||
export GOOS=js
|
||||
export GOARCH=wasm
|
||||
|
||||
# Build the WASM file using native Go
|
||||
echo "Compiling main.go to go.wasm with Go..."
|
||||
go build -o go-format.wasm main.go
|
||||
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "Build successful!"
|
||||
|
||||
echo "Go Prettier Plugin WASM is ready!"
|
||||
else
|
||||
echo "Build failed!"
|
||||
exit 1
|
||||
fi
|
||||
echo "Generating JS..."
|
||||
cp $(tinygo env TINYGOROOT)/targets/wasm_exec.js ../gofmt.js
|
||||
|
||||
23
frontend/src/common/prettier/plugins/go/src/lib.go
Normal file
23
frontend/src/common/prettier/plugins/go/src/lib.go
Normal file
@@ -0,0 +1,23 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/format"
|
||||
"syscall/js"
|
||||
)
|
||||
|
||||
func Format(this js.Value, args []js.Value) any {
|
||||
input := ([]byte)(args[0].String())
|
||||
|
||||
output, err := format.Source(input)
|
||||
if err != nil {
|
||||
return []any{true, err.Error()}
|
||||
}
|
||||
|
||||
return []any{false, string(output)}
|
||||
}
|
||||
|
||||
func main() {
|
||||
done := make(chan bool)
|
||||
js.Global().Set("format", js.FuncOf(Format))
|
||||
<-done
|
||||
}
|
||||
@@ -1,66 +0,0 @@
|
||||
//go:build js && wasm
|
||||
|
||||
// Package main implements a WebAssembly module that provides Go code formatting
|
||||
// functionality for the Prettier plugin. This package exposes the formatGo function
|
||||
// to JavaScript, enabling web-based Go code formatting using Go's built-in format package.
|
||||
//
|
||||
// The module is designed to be compiled to WebAssembly using native Go (GOOS=js GOARCH=wasm)
|
||||
// and loaded in browser environments as part of the Go Prettier plugin.
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/format"
|
||||
"syscall/js"
|
||||
)
|
||||
|
||||
// formatGo is a JavaScript-callable function that formats Go source code.
|
||||
// It wraps the standard library's go/format.Source function to be accessible
|
||||
// from JavaScript environments through WebAssembly.
|
||||
//
|
||||
// Parameters:
|
||||
// - this: The JavaScript 'this' context (unused)
|
||||
// - i: JavaScript arguments array where i[0] should contain the Go source code as a string
|
||||
//
|
||||
// Returns:
|
||||
// - js.Value: The formatted Go source code as a JavaScript string value
|
||||
// - If formatting fails due to syntax errors, returns the original code unchanged
|
||||
// - If no arguments are provided, returns js.Null() and logs an error
|
||||
//
|
||||
// The function handles syntax errors gracefully by returning the original code
|
||||
// and logging error details to the JavaScript console for debugging purposes.
|
||||
func formatGo(this js.Value, i []js.Value) interface{} {
|
||||
if len(i) == 0 {
|
||||
js.Global().Get("console").Call("error", "formatGo: missing code argument")
|
||||
return js.Null()
|
||||
}
|
||||
code := i[0].String()
|
||||
formatted, err := format.Source([]byte(code))
|
||||
if err != nil {
|
||||
// In case of a syntax error in the Go code, go/format returns an error.
|
||||
// Prettier expects the original text to be returned in case of an error.
|
||||
// We also log the error to the console for debugging purposes.
|
||||
js.Global().Get("console").Call("error", "Error formatting Go code:", err.Error())
|
||||
return js.ValueOf(code)
|
||||
}
|
||||
return js.ValueOf(string(formatted))
|
||||
}
|
||||
|
||||
// main initializes the WebAssembly module and exposes the formatGo function
|
||||
// to the JavaScript global scope. The function sets up a blocking channel
|
||||
// to prevent the WASM module from exiting, allowing it to serve as a
|
||||
// long-running service for formatting operations.
|
||||
//
|
||||
// The exposed formatGo function can be called from JavaScript as:
|
||||
//
|
||||
// global.formatGo(sourceCode)
|
||||
func main() {
|
||||
// Create a channel to keep the Go program running.
|
||||
// This is necessary because the WASM module would exit otherwise.
|
||||
c := make(chan struct{}, 0)
|
||||
|
||||
// Expose the formatGo function to the JavaScript global scope.
|
||||
js.Global().Set("formatGo", js.FuncOf(formatGo))
|
||||
|
||||
// Block forever
|
||||
<-c
|
||||
}
|
||||
@@ -1,553 +0,0 @@
|
||||
// Copyright 2018 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
//
|
||||
// This file has been modified for use by the TinyGo compiler.
|
||||
|
||||
(() => {
|
||||
// Map multiple JavaScript environments to a single common API,
|
||||
// preferring web standards over Node.js API.
|
||||
//
|
||||
// Environments considered:
|
||||
// - Browsers
|
||||
// - Node.js
|
||||
// - Electron
|
||||
// - Parcel
|
||||
|
||||
if (typeof global !== "undefined") {
|
||||
// global already exists
|
||||
} else if (typeof window !== "undefined") {
|
||||
window.global = window;
|
||||
} else if (typeof self !== "undefined") {
|
||||
self.global = self;
|
||||
} else {
|
||||
throw new Error("cannot export Go (neither global, window nor self is defined)");
|
||||
}
|
||||
|
||||
if (!global.require && typeof require !== "undefined") {
|
||||
global.require = require;
|
||||
}
|
||||
|
||||
if (!global.fs && global.require) {
|
||||
global.fs = require("node:fs");
|
||||
}
|
||||
|
||||
const enosys = () => {
|
||||
const err = new Error("not implemented");
|
||||
err.code = "ENOSYS";
|
||||
return err;
|
||||
};
|
||||
|
||||
if (!global.fs) {
|
||||
let outputBuf = "";
|
||||
global.fs = {
|
||||
constants: { O_WRONLY: -1, O_RDWR: -1, O_CREAT: -1, O_TRUNC: -1, O_APPEND: -1, O_EXCL: -1 }, // unused
|
||||
writeSync(fd, buf) {
|
||||
outputBuf += decoder.decode(buf);
|
||||
const nl = outputBuf.lastIndexOf("\n");
|
||||
if (nl != -1) {
|
||||
console.log(outputBuf.substr(0, nl));
|
||||
outputBuf = outputBuf.substr(nl + 1);
|
||||
}
|
||||
return buf.length;
|
||||
},
|
||||
write(fd, buf, offset, length, position, callback) {
|
||||
if (offset !== 0 || length !== buf.length || position !== null) {
|
||||
callback(enosys());
|
||||
return;
|
||||
}
|
||||
const n = this.writeSync(fd, buf);
|
||||
callback(null, n);
|
||||
},
|
||||
chmod(path, mode, callback) { callback(enosys()); },
|
||||
chown(path, uid, gid, callback) { callback(enosys()); },
|
||||
close(fd, callback) { callback(enosys()); },
|
||||
fchmod(fd, mode, callback) { callback(enosys()); },
|
||||
fchown(fd, uid, gid, callback) { callback(enosys()); },
|
||||
fstat(fd, callback) { callback(enosys()); },
|
||||
fsync(fd, callback) { callback(null); },
|
||||
ftruncate(fd, length, callback) { callback(enosys()); },
|
||||
lchown(path, uid, gid, callback) { callback(enosys()); },
|
||||
link(path, link, callback) { callback(enosys()); },
|
||||
lstat(path, callback) { callback(enosys()); },
|
||||
mkdir(path, perm, callback) { callback(enosys()); },
|
||||
open(path, flags, mode, callback) { callback(enosys()); },
|
||||
read(fd, buffer, offset, length, position, callback) { callback(enosys()); },
|
||||
readdir(path, callback) { callback(enosys()); },
|
||||
readlink(path, callback) { callback(enosys()); },
|
||||
rename(from, to, callback) { callback(enosys()); },
|
||||
rmdir(path, callback) { callback(enosys()); },
|
||||
stat(path, callback) { callback(enosys()); },
|
||||
symlink(path, link, callback) { callback(enosys()); },
|
||||
truncate(path, length, callback) { callback(enosys()); },
|
||||
unlink(path, callback) { callback(enosys()); },
|
||||
utimes(path, atime, mtime, callback) { callback(enosys()); },
|
||||
};
|
||||
}
|
||||
|
||||
if (!global.process) {
|
||||
global.process = {
|
||||
getuid() { return -1; },
|
||||
getgid() { return -1; },
|
||||
geteuid() { return -1; },
|
||||
getegid() { return -1; },
|
||||
getgroups() { throw enosys(); },
|
||||
pid: -1,
|
||||
ppid: -1,
|
||||
umask() { throw enosys(); },
|
||||
cwd() { throw enosys(); },
|
||||
chdir() { throw enosys(); },
|
||||
}
|
||||
}
|
||||
|
||||
if (!global.crypto) {
|
||||
const nodeCrypto = require("node:crypto");
|
||||
global.crypto = {
|
||||
getRandomValues(b) {
|
||||
nodeCrypto.randomFillSync(b);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (!global.performance) {
|
||||
global.performance = {
|
||||
now() {
|
||||
const [sec, nsec] = process.hrtime();
|
||||
return sec * 1000 + nsec / 1000000;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (!global.TextEncoder) {
|
||||
global.TextEncoder = require("node:util").TextEncoder;
|
||||
}
|
||||
|
||||
if (!global.TextDecoder) {
|
||||
global.TextDecoder = require("node:util").TextDecoder;
|
||||
}
|
||||
|
||||
// End of polyfills for common API.
|
||||
|
||||
const encoder = new TextEncoder("utf-8");
|
||||
const decoder = new TextDecoder("utf-8");
|
||||
let reinterpretBuf = new DataView(new ArrayBuffer(8));
|
||||
var logLine = [];
|
||||
const wasmExit = {}; // thrown to exit via proc_exit (not an error)
|
||||
|
||||
global.TinyGo = class {
|
||||
constructor() {
|
||||
this._callbackTimeouts = new Map();
|
||||
this._nextCallbackTimeoutID = 1;
|
||||
|
||||
const mem = () => {
|
||||
// The buffer may change when requesting more memory.
|
||||
return new DataView(this._inst.exports.memory.buffer);
|
||||
}
|
||||
|
||||
const unboxValue = (v_ref) => {
|
||||
reinterpretBuf.setBigInt64(0, v_ref, true);
|
||||
const f = reinterpretBuf.getFloat64(0, true);
|
||||
if (f === 0) {
|
||||
return undefined;
|
||||
}
|
||||
if (!isNaN(f)) {
|
||||
return f;
|
||||
}
|
||||
|
||||
const id = v_ref & 0xffffffffn;
|
||||
return this._values[id];
|
||||
}
|
||||
|
||||
|
||||
const loadValue = (addr) => {
|
||||
let v_ref = mem().getBigUint64(addr, true);
|
||||
return unboxValue(v_ref);
|
||||
}
|
||||
|
||||
const boxValue = (v) => {
|
||||
const nanHead = 0x7FF80000n;
|
||||
|
||||
if (typeof v === "number") {
|
||||
if (isNaN(v)) {
|
||||
return nanHead << 32n;
|
||||
}
|
||||
if (v === 0) {
|
||||
return (nanHead << 32n) | 1n;
|
||||
}
|
||||
reinterpretBuf.setFloat64(0, v, true);
|
||||
return reinterpretBuf.getBigInt64(0, true);
|
||||
}
|
||||
|
||||
switch (v) {
|
||||
case undefined:
|
||||
return 0n;
|
||||
case null:
|
||||
return (nanHead << 32n) | 2n;
|
||||
case true:
|
||||
return (nanHead << 32n) | 3n;
|
||||
case false:
|
||||
return (nanHead << 32n) | 4n;
|
||||
}
|
||||
|
||||
let id = this._ids.get(v);
|
||||
if (id === undefined) {
|
||||
id = this._idPool.pop();
|
||||
if (id === undefined) {
|
||||
id = BigInt(this._values.length);
|
||||
}
|
||||
this._values[id] = v;
|
||||
this._goRefCounts[id] = 0;
|
||||
this._ids.set(v, id);
|
||||
}
|
||||
this._goRefCounts[id]++;
|
||||
let typeFlag = 1n;
|
||||
switch (typeof v) {
|
||||
case "string":
|
||||
typeFlag = 2n;
|
||||
break;
|
||||
case "symbol":
|
||||
typeFlag = 3n;
|
||||
break;
|
||||
case "function":
|
||||
typeFlag = 4n;
|
||||
break;
|
||||
}
|
||||
return id | ((nanHead | typeFlag) << 32n);
|
||||
}
|
||||
|
||||
const storeValue = (addr, v) => {
|
||||
let v_ref = boxValue(v);
|
||||
mem().setBigUint64(addr, v_ref, true);
|
||||
}
|
||||
|
||||
const loadSlice = (array, len, cap) => {
|
||||
return new Uint8Array(this._inst.exports.memory.buffer, array, len);
|
||||
}
|
||||
|
||||
const loadSliceOfValues = (array, len, cap) => {
|
||||
const a = new Array(len);
|
||||
for (let i = 0; i < len; i++) {
|
||||
a[i] = loadValue(array + i * 8);
|
||||
}
|
||||
return a;
|
||||
}
|
||||
|
||||
const loadString = (ptr, len) => {
|
||||
return decoder.decode(new DataView(this._inst.exports.memory.buffer, ptr, len));
|
||||
}
|
||||
|
||||
const timeOrigin = Date.now() - performance.now();
|
||||
this.importObject = {
|
||||
wasi_snapshot_preview1: {
|
||||
// https://github.com/WebAssembly/WASI/blob/main/phases/snapshot/docs.md#fd_write
|
||||
fd_write: function(fd, iovs_ptr, iovs_len, nwritten_ptr) {
|
||||
let nwritten = 0;
|
||||
if (fd == 1) {
|
||||
for (let iovs_i=0; iovs_i<iovs_len;iovs_i++) {
|
||||
let iov_ptr = iovs_ptr+iovs_i*8; // assuming wasm32
|
||||
let ptr = mem().getUint32(iov_ptr + 0, true);
|
||||
let len = mem().getUint32(iov_ptr + 4, true);
|
||||
nwritten += len;
|
||||
for (let i=0; i<len; i++) {
|
||||
let c = mem().getUint8(ptr+i);
|
||||
if (c == 13) { // CR
|
||||
// ignore
|
||||
} else if (c == 10) { // LF
|
||||
// write line
|
||||
let line = decoder.decode(new Uint8Array(logLine));
|
||||
logLine = [];
|
||||
console.log(line);
|
||||
} else {
|
||||
logLine.push(c);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
console.error('invalid file descriptor:', fd);
|
||||
}
|
||||
mem().setUint32(nwritten_ptr, nwritten, true);
|
||||
return 0;
|
||||
},
|
||||
fd_close: () => 0, // dummy
|
||||
fd_fdstat_get: () => 0, // dummy
|
||||
fd_seek: () => 0, // dummy
|
||||
proc_exit: (code) => {
|
||||
this.exited = true;
|
||||
this.exitCode = code;
|
||||
this._resolveExitPromise();
|
||||
throw wasmExit;
|
||||
},
|
||||
random_get: (bufPtr, bufLen) => {
|
||||
crypto.getRandomValues(loadSlice(bufPtr, bufLen));
|
||||
return 0;
|
||||
},
|
||||
},
|
||||
gojs: {
|
||||
// func ticks() int64
|
||||
"runtime.ticks": () => {
|
||||
return BigInt((timeOrigin + performance.now()) * 1e6);
|
||||
},
|
||||
|
||||
// func sleepTicks(timeout int64)
|
||||
"runtime.sleepTicks": (timeout) => {
|
||||
// Do not sleep, only reactivate scheduler after the given timeout.
|
||||
setTimeout(() => {
|
||||
if (this.exited) return;
|
||||
try {
|
||||
this._inst.exports.go_scheduler();
|
||||
} catch (e) {
|
||||
if (e !== wasmExit) throw e;
|
||||
}
|
||||
}, Number(timeout)/1e6);
|
||||
},
|
||||
|
||||
// func finalizeRef(v ref)
|
||||
"syscall/js.finalizeRef": (v_ref) => {
|
||||
// Note: TinyGo does not support finalizers so this is only called
|
||||
// for one specific case, by js.go:jsString. and can/might leak memory.
|
||||
const id = v_ref & 0xffffffffn;
|
||||
if (this._goRefCounts?.[id] !== undefined) {
|
||||
this._goRefCounts[id]--;
|
||||
if (this._goRefCounts[id] === 0) {
|
||||
const v = this._values[id];
|
||||
this._values[id] = null;
|
||||
this._ids.delete(v);
|
||||
this._idPool.push(id);
|
||||
}
|
||||
} else {
|
||||
console.error("syscall/js.finalizeRef: unknown id", id);
|
||||
}
|
||||
},
|
||||
|
||||
// func stringVal(value string) ref
|
||||
"syscall/js.stringVal": (value_ptr, value_len) => {
|
||||
value_ptr >>>= 0;
|
||||
const s = loadString(value_ptr, value_len);
|
||||
return boxValue(s);
|
||||
},
|
||||
|
||||
// func valueGet(v ref, p string) ref
|
||||
"syscall/js.valueGet": (v_ref, p_ptr, p_len) => {
|
||||
let prop = loadString(p_ptr, p_len);
|
||||
let v = unboxValue(v_ref);
|
||||
let result = Reflect.get(v, prop);
|
||||
return boxValue(result);
|
||||
},
|
||||
|
||||
// func valueSet(v ref, p string, x ref)
|
||||
"syscall/js.valueSet": (v_ref, p_ptr, p_len, x_ref) => {
|
||||
const v = unboxValue(v_ref);
|
||||
const p = loadString(p_ptr, p_len);
|
||||
const x = unboxValue(x_ref);
|
||||
Reflect.set(v, p, x);
|
||||
},
|
||||
|
||||
// func valueDelete(v ref, p string)
|
||||
"syscall/js.valueDelete": (v_ref, p_ptr, p_len) => {
|
||||
const v = unboxValue(v_ref);
|
||||
const p = loadString(p_ptr, p_len);
|
||||
Reflect.deleteProperty(v, p);
|
||||
},
|
||||
|
||||
// func valueIndex(v ref, i int) ref
|
||||
"syscall/js.valueIndex": (v_ref, i) => {
|
||||
return boxValue(Reflect.get(unboxValue(v_ref), i));
|
||||
},
|
||||
|
||||
// valueSetIndex(v ref, i int, x ref)
|
||||
"syscall/js.valueSetIndex": (v_ref, i, x_ref) => {
|
||||
Reflect.set(unboxValue(v_ref), i, unboxValue(x_ref));
|
||||
},
|
||||
|
||||
// func valueCall(v ref, m string, args []ref) (ref, bool)
|
||||
"syscall/js.valueCall": (ret_addr, v_ref, m_ptr, m_len, args_ptr, args_len, args_cap) => {
|
||||
const v = unboxValue(v_ref);
|
||||
const name = loadString(m_ptr, m_len);
|
||||
const args = loadSliceOfValues(args_ptr, args_len, args_cap);
|
||||
try {
|
||||
const m = Reflect.get(v, name);
|
||||
storeValue(ret_addr, Reflect.apply(m, v, args));
|
||||
mem().setUint8(ret_addr + 8, 1);
|
||||
} catch (err) {
|
||||
storeValue(ret_addr, err);
|
||||
mem().setUint8(ret_addr + 8, 0);
|
||||
}
|
||||
},
|
||||
|
||||
// func valueInvoke(v ref, args []ref) (ref, bool)
|
||||
"syscall/js.valueInvoke": (ret_addr, v_ref, args_ptr, args_len, args_cap) => {
|
||||
try {
|
||||
const v = unboxValue(v_ref);
|
||||
const args = loadSliceOfValues(args_ptr, args_len, args_cap);
|
||||
storeValue(ret_addr, Reflect.apply(v, undefined, args));
|
||||
mem().setUint8(ret_addr + 8, 1);
|
||||
} catch (err) {
|
||||
storeValue(ret_addr, err);
|
||||
mem().setUint8(ret_addr + 8, 0);
|
||||
}
|
||||
},
|
||||
|
||||
// func valueNew(v ref, args []ref) (ref, bool)
|
||||
"syscall/js.valueNew": (ret_addr, v_ref, args_ptr, args_len, args_cap) => {
|
||||
const v = unboxValue(v_ref);
|
||||
const args = loadSliceOfValues(args_ptr, args_len, args_cap);
|
||||
try {
|
||||
storeValue(ret_addr, Reflect.construct(v, args));
|
||||
mem().setUint8(ret_addr + 8, 1);
|
||||
} catch (err) {
|
||||
storeValue(ret_addr, err);
|
||||
mem().setUint8(ret_addr+ 8, 0);
|
||||
}
|
||||
},
|
||||
|
||||
// func valueLength(v ref) int
|
||||
"syscall/js.valueLength": (v_ref) => {
|
||||
return unboxValue(v_ref).length;
|
||||
},
|
||||
|
||||
// valuePrepareString(v ref) (ref, int)
|
||||
"syscall/js.valuePrepareString": (ret_addr, v_ref) => {
|
||||
const s = String(unboxValue(v_ref));
|
||||
const str = encoder.encode(s);
|
||||
storeValue(ret_addr, str);
|
||||
mem().setInt32(ret_addr + 8, str.length, true);
|
||||
},
|
||||
|
||||
// valueLoadString(v ref, b []byte)
|
||||
"syscall/js.valueLoadString": (v_ref, slice_ptr, slice_len, slice_cap) => {
|
||||
const str = unboxValue(v_ref);
|
||||
loadSlice(slice_ptr, slice_len, slice_cap).set(str);
|
||||
},
|
||||
|
||||
// func valueInstanceOf(v ref, t ref) bool
|
||||
"syscall/js.valueInstanceOf": (v_ref, t_ref) => {
|
||||
return unboxValue(v_ref) instanceof unboxValue(t_ref);
|
||||
},
|
||||
|
||||
// func copyBytesToGo(dst []byte, src ref) (int, bool)
|
||||
"syscall/js.copyBytesToGo": (ret_addr, dest_addr, dest_len, dest_cap, src_ref) => {
|
||||
let num_bytes_copied_addr = ret_addr;
|
||||
let returned_status_addr = ret_addr + 4; // Address of returned boolean status variable
|
||||
|
||||
const dst = loadSlice(dest_addr, dest_len);
|
||||
const src = unboxValue(src_ref);
|
||||
if (!(src instanceof Uint8Array || src instanceof Uint8ClampedArray)) {
|
||||
mem().setUint8(returned_status_addr, 0); // Return "not ok" status
|
||||
return;
|
||||
}
|
||||
const toCopy = src.subarray(0, dst.length);
|
||||
dst.set(toCopy);
|
||||
mem().setUint32(num_bytes_copied_addr, toCopy.length, true);
|
||||
mem().setUint8(returned_status_addr, 1); // Return "ok" status
|
||||
},
|
||||
|
||||
// copyBytesToJS(dst ref, src []byte) (int, bool)
|
||||
// Originally copied from upstream Go project, then modified:
|
||||
// https://github.com/golang/go/blob/3f995c3f3b43033013013e6c7ccc93a9b1411ca9/misc/wasm/wasm_exec.js#L404-L416
|
||||
"syscall/js.copyBytesToJS": (ret_addr, dst_ref, src_addr, src_len, src_cap) => {
|
||||
let num_bytes_copied_addr = ret_addr;
|
||||
let returned_status_addr = ret_addr + 4; // Address of returned boolean status variable
|
||||
|
||||
const dst = unboxValue(dst_ref);
|
||||
const src = loadSlice(src_addr, src_len);
|
||||
if (!(dst instanceof Uint8Array || dst instanceof Uint8ClampedArray)) {
|
||||
mem().setUint8(returned_status_addr, 0); // Return "not ok" status
|
||||
return;
|
||||
}
|
||||
const toCopy = src.subarray(0, dst.length);
|
||||
dst.set(toCopy);
|
||||
mem().setUint32(num_bytes_copied_addr, toCopy.length, true);
|
||||
mem().setUint8(returned_status_addr, 1); // Return "ok" status
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
// Go 1.20 uses 'env'. Go 1.21 uses 'gojs'.
|
||||
// For compatibility, we use both as long as Go 1.20 is supported.
|
||||
this.importObject.env = this.importObject.gojs;
|
||||
}
|
||||
|
||||
async run(instance) {
|
||||
this._inst = instance;
|
||||
this._values = [ // JS values that Go currently has references to, indexed by reference id
|
||||
NaN,
|
||||
0,
|
||||
null,
|
||||
true,
|
||||
false,
|
||||
global,
|
||||
this,
|
||||
];
|
||||
this._goRefCounts = []; // number of references that Go has to a JS value, indexed by reference id
|
||||
this._ids = new Map(); // mapping from JS values to reference ids
|
||||
this._idPool = []; // unused ids that have been garbage collected
|
||||
this.exited = false; // whether the Go program has exited
|
||||
this.exitCode = 0;
|
||||
|
||||
if (this._inst.exports._start) {
|
||||
let exitPromise = new Promise((resolve, reject) => {
|
||||
this._resolveExitPromise = resolve;
|
||||
});
|
||||
|
||||
// Run program, but catch the wasmExit exception that's thrown
|
||||
// to return back here.
|
||||
try {
|
||||
this._inst.exports._start();
|
||||
} catch (e) {
|
||||
if (e !== wasmExit) throw e;
|
||||
}
|
||||
|
||||
await exitPromise;
|
||||
return this.exitCode;
|
||||
} else {
|
||||
this._inst.exports._initialize();
|
||||
}
|
||||
}
|
||||
|
||||
_resume() {
|
||||
if (this.exited) {
|
||||
throw new Error("Go program has already exited");
|
||||
}
|
||||
try {
|
||||
this._inst.exports.resume();
|
||||
} catch (e) {
|
||||
if (e !== wasmExit) throw e;
|
||||
}
|
||||
if (this.exited) {
|
||||
this._resolveExitPromise();
|
||||
}
|
||||
}
|
||||
|
||||
_makeFuncWrapper(id) {
|
||||
const go = this;
|
||||
return function () {
|
||||
const event = { id: id, this: this, args: arguments };
|
||||
go._pendingEvent = event;
|
||||
go._resume();
|
||||
return event.result;
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
global.require &&
|
||||
global.require.main === module &&
|
||||
global.process &&
|
||||
global.process.versions &&
|
||||
!global.process.versions.electron
|
||||
) {
|
||||
if (process.argv.length != 3) {
|
||||
console.error("usage: go_js_wasm_exec [wasm binary] [arguments]");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const go = new Go();
|
||||
WebAssembly.instantiate(fs.readFileSync(process.argv[2]), go.importObject).then(async (result) => {
|
||||
let exitCode = await go.run(result.instance);
|
||||
process.exit(exitCode);
|
||||
}).catch((err) => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
})();
|
||||
@@ -33,33 +33,38 @@ const luaParser: Parser<string> = {
|
||||
locEnd: (node: string) => node.length,
|
||||
};
|
||||
|
||||
// Initialize StyLua WASM module
|
||||
// Lazy initialize StyLua WASM module
|
||||
let initPromise: Promise<void> | null = null;
|
||||
let isInitialized = false;
|
||||
|
||||
function initStyLua(): Promise<void> {
|
||||
if (initPromise) {
|
||||
return initPromise;
|
||||
if (isInitialized) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
initPromise = (async () => {
|
||||
if (!isInitialized) {
|
||||
await luaInit();
|
||||
isInitialized = true;
|
||||
}
|
||||
})();
|
||||
if (!initPromise) {
|
||||
initPromise = (async () => {
|
||||
try {
|
||||
await luaInit();
|
||||
isInitialized = true;
|
||||
} catch (error) {
|
||||
console.warn('Failed to initialize StyLua WASM module:', error);
|
||||
initPromise = null;
|
||||
throw error;
|
||||
}
|
||||
})();
|
||||
}
|
||||
|
||||
return initPromise;
|
||||
}
|
||||
|
||||
// Printer configuration
|
||||
const luaPrinter: Printer<string> = {
|
||||
print: (path, options) => {
|
||||
// @ts-expect-error -- Support async printer like shell plugin
|
||||
async print(path, options) {
|
||||
try {
|
||||
if (!isInitialized) {
|
||||
console.warn('StyLua WASM module not initialized, returning original text');
|
||||
return (path as any).getValue ? (path as any).getValue() : path.node;
|
||||
}
|
||||
// Wait for initialization to complete
|
||||
await initStyLua();
|
||||
|
||||
const text = (path as any).getValue ? (path as any).getValue() : path.node;
|
||||
const config = getStyLuaConfig(options);
|
||||
@@ -167,11 +172,6 @@ const luaPlugin: Plugin = {
|
||||
options,
|
||||
};
|
||||
|
||||
// Initialize WASM module when plugin loads
|
||||
initStyLua().catch(error => {
|
||||
console.warn('Failed to initialize StyLua WASM module:', error);
|
||||
});
|
||||
|
||||
export default luaPlugin;
|
||||
export { languages };
|
||||
export const parsers = luaPlugin.parsers;
|
||||
|
||||
@@ -33,33 +33,38 @@ const pythonParser: Parser<string> = {
|
||||
locEnd: (node: string) => node.length,
|
||||
};
|
||||
|
||||
// Initialize Ruff WASM module
|
||||
// Lazy initialize Ruff WASM module
|
||||
let initPromise: Promise<void> | null = null;
|
||||
let isInitialized = false;
|
||||
|
||||
function initRuff(): Promise<void> {
|
||||
if (initPromise) {
|
||||
return initPromise;
|
||||
if (isInitialized) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
initPromise = (async () => {
|
||||
if (!isInitialized) {
|
||||
await ruffInit();
|
||||
isInitialized = true;
|
||||
}
|
||||
})();
|
||||
if (!initPromise) {
|
||||
initPromise = (async () => {
|
||||
try {
|
||||
await ruffInit();
|
||||
isInitialized = true;
|
||||
} catch (error) {
|
||||
console.warn('Failed to initialize Ruff WASM module:', error);
|
||||
initPromise = null;
|
||||
throw error;
|
||||
}
|
||||
})();
|
||||
}
|
||||
|
||||
return initPromise;
|
||||
}
|
||||
|
||||
// Printer configuration
|
||||
const pythonPrinter: Printer<string> = {
|
||||
print: (path, options) => {
|
||||
// @ts-expect-error -- Support async printer like shell plugin
|
||||
async print(path, options) {
|
||||
try {
|
||||
if (!isInitialized) {
|
||||
console.warn('Ruff WASM module not initialized, returning original text');
|
||||
return (path as any).getValue ? (path as any).getValue() : path.node;
|
||||
}
|
||||
// Wait for initialization to complete
|
||||
await initRuff();
|
||||
|
||||
const text = (path as any).getValue ? (path as any).getValue() : path.node;
|
||||
const config = getRuffConfig(options);
|
||||
@@ -135,11 +140,6 @@ const pythonPlugin: Plugin = {
|
||||
options,
|
||||
};
|
||||
|
||||
// Initialize WASM module when plugin loads
|
||||
initRuff().catch(error => {
|
||||
console.warn('Failed to initialize Ruff WASM module:', error);
|
||||
});
|
||||
|
||||
export default pythonPlugin;
|
||||
export { languages };
|
||||
export const parsers = pythonPlugin.parsers;
|
||||
|
||||
@@ -1,824 +0,0 @@
|
||||
import { CommentOrDocComment, LocArray, Node, NodeType, NodeWithBodyOrCases } from "jinx-rust";
|
||||
import {
|
||||
end,
|
||||
getBodyOrCases,
|
||||
getLastParameter,
|
||||
hasOuterAttributes,
|
||||
isInner,
|
||||
is_Attribute,
|
||||
is_AttributeOrDocComment,
|
||||
is_BlockCommentKind,
|
||||
is_BlockCommentNode,
|
||||
is_Comment,
|
||||
is_CommentOrDocComment,
|
||||
is_ExpressionWithBodyOrCases,
|
||||
is_ExternSpecifier,
|
||||
is_FlowControlExpression,
|
||||
is_FunctionDeclaration,
|
||||
is_FunctionNode,
|
||||
is_IfBlockExpression,
|
||||
is_LineCommentKind,
|
||||
is_LineCommentNode,
|
||||
is_LocArray,
|
||||
is_MacroRule,
|
||||
is_NodeWithBodyOrCases,
|
||||
is_ReassignmentNode,
|
||||
is_StatementNode,
|
||||
is_StructLiteralProperty,
|
||||
is_StructLiteralPropertySpread,
|
||||
nisAnyOf,
|
||||
ownStart,
|
||||
start,
|
||||
} from "jinx-rust/utils";
|
||||
import { is_CallExpression_or_CallLikeMacroInvocation } from "../transform";
|
||||
import { Narrow, assert, exit, iLast, last_of, maybe_last_of } from "../utils/common";
|
||||
import { is_MemberAccessLike, is_xVariableEqualishLike } from "./core";
|
||||
import {
|
||||
AnyComment,
|
||||
CustomOptions,
|
||||
DCM,
|
||||
Doc,
|
||||
MutatedAttribute,
|
||||
NodeWithComments,
|
||||
PrettierCommentInfo,
|
||||
breakParent,
|
||||
cursor,
|
||||
hardline,
|
||||
indent,
|
||||
join,
|
||||
line,
|
||||
lineSuffix,
|
||||
literalline,
|
||||
} from "./external";
|
||||
import { assertPathAtNode, canAttachComment, getAllComments, getContext, getNode, getOptions, pathCallEach } from "./plugin";
|
||||
import { shouldPrintOuterAttributesAbove } from "./styling";
|
||||
|
||||
function addCommentHelper(node: Node, comment: AnyComment, leading = false, trailing = false) {
|
||||
__DEV__: assert(!handled(comment));
|
||||
((node as NodeWithComments<Node>).comments ??= []).push(comment);
|
||||
(comment.leading = leading), (comment.trailing = trailing), (comment.printed = false);
|
||||
}
|
||||
|
||||
function addLeadingComment(node: Node, comment: AnyComment) {
|
||||
addCommentHelper(node, comment, true);
|
||||
}
|
||||
function addDanglingComment(node: Node, comment: AnyComment, marker: DCM) {
|
||||
addCommentHelper(node, comment);
|
||||
comment.marker = marker;
|
||||
}
|
||||
function addTrailingComment(node: Node, comment: AnyComment) {
|
||||
addCommentHelper(node, comment, false, true);
|
||||
}
|
||||
export function setPrettierIgnoreTarget(node: Node, comment: AnyComment) {
|
||||
__DEV__: Narrow<Node & { prettierIgnore?: true }>(node), assert(isPrettierIgnoreComment(comment) || isPrettierIgnoreAttribute(comment));
|
||||
comment.unignore = true;
|
||||
node.prettierIgnore = true;
|
||||
}
|
||||
|
||||
function hasComments<T extends Node>(node: T): node is NodeWithComments<T> {
|
||||
return "comments" in node && node.comments.length > 0;
|
||||
}
|
||||
|
||||
export function printDanglingComments(enclosingNode: Node, sameIndent: boolean, marker?: DCM) {
|
||||
if (hasComments(enclosingNode)) {
|
||||
const printed: Doc[] = [];
|
||||
pathCallEach(enclosingNode, "comments", (comment) => {
|
||||
if (isDangling(comment) && (!marker || comment.marker === marker)) {
|
||||
printed.push(printComment(comment));
|
||||
}
|
||||
});
|
||||
if (printed.length > 0) {
|
||||
return sameIndent //
|
||||
? join(hardline, printed)
|
||||
: indent([hardline, join(hardline, printed)]);
|
||||
}
|
||||
}
|
||||
return "";
|
||||
}
|
||||
|
||||
export function needsHardlineAfterDanglingComment(node: Node) {
|
||||
if (!hasComment(node)) return false;
|
||||
const lastDanglingComment = maybe_last_of(getComments(node, CF.Dangling));
|
||||
return lastDanglingComment && is_LineCommentNode(lastDanglingComment);
|
||||
}
|
||||
export function setDidPrintComment(comment: AnyComment) {
|
||||
comment.printed = true;
|
||||
}
|
||||
|
||||
function printComment(comment: AnyComment) {
|
||||
__DEV__: assertPathAtNode("printComment", comment);
|
||||
__DEV__: assert(handled(comment), `Assertion failed: Comment was not printed at ${comment.loc.url()}`, comment);
|
||||
setDidPrintComment(comment);
|
||||
return getContext().options.printer.printComment!(getContext().path as any, getOptions());
|
||||
}
|
||||
|
||||
export function isPreviousLineEmpty(node: Node) {
|
||||
let index = start(node) - 1;
|
||||
index = skipSpaces(index, true) as number;
|
||||
index = skipNewline(index, true) as number;
|
||||
index = skipSpaces(index, true) as number;
|
||||
return index !== skipNewline(index, true);
|
||||
}
|
||||
export function hasBreaklineBefore(node: Node) {
|
||||
return hasNewline(start(node) - 1, true);
|
||||
}
|
||||
|
||||
export function hasBreaklineAfter(node: Node) {
|
||||
return hasNewline(end(node));
|
||||
}
|
||||
|
||||
export function printCommentsSeparately(ignored?: Set<AnyComment>) {
|
||||
const node = getNode();
|
||||
__DEV__: Narrow<Node & { comments?: AnyComment[] }>(node);
|
||||
|
||||
const leading: Doc[] = [];
|
||||
const trailing: Doc[] = [];
|
||||
let hasTrailingLineComment = false;
|
||||
let hadLeadingBlockComment = false;
|
||||
|
||||
if ("comments" in node) {
|
||||
pathCallEach(node, "comments", (comment) => {
|
||||
if (ignored?.has(comment)) {
|
||||
return;
|
||||
} else if (isLeading(comment)) {
|
||||
leading.push(printLeadingComment(comment));
|
||||
} else if (isTrailing(comment)) {
|
||||
trailing.push(printTrailingComment(comment));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (node === getOptions().cursorNode) {
|
||||
leading.unshift(cursor);
|
||||
trailing.push(cursor);
|
||||
}
|
||||
|
||||
return (leading.length | trailing.length) > 0 ? { leading, trailing } : ({ leading: "", trailing: "" } as const);
|
||||
|
||||
function printLeadingComment(comment: AnyComment) {
|
||||
if (is_Attribute(comment) && !comment.inner) {
|
||||
const printed = printComment(comment);
|
||||
return [printed, " "];
|
||||
}
|
||||
hadLeadingBlockComment ||= is_BlockCommentKind(comment) && hasBreaklineBefore(comment);
|
||||
return [
|
||||
printComment(comment),
|
||||
is_BlockCommentKind(comment)
|
||||
? hasBreaklineAfter(comment) //
|
||||
? hadLeadingBlockComment
|
||||
? hardline
|
||||
: line
|
||||
: " "
|
||||
: hardline,
|
||||
hasNewline(skipNewline(skipSpaces(end(comment)))) ? hardline : "",
|
||||
];
|
||||
}
|
||||
|
||||
function printTrailingComment(comment: AnyComment) {
|
||||
const printed = printComment(comment);
|
||||
return hasBreaklineBefore(comment)
|
||||
? lineSuffix([hardline, isPreviousLineEmpty(comment) ? hardline : "", printed])
|
||||
: is_BlockCommentNode(comment)
|
||||
? [" ", printed]
|
||||
: lineSuffix([" ", printed, hasTrailingLineComment === (hasTrailingLineComment = true) ? hardline : breakParent]);
|
||||
}
|
||||
}
|
||||
|
||||
export function getPostLeadingComment(comment: AnyComment) {
|
||||
// console.log(comment.loc.url());
|
||||
// is_BlockCommentKind(comment)
|
||||
// ? hasBreaklineAfter(comment) //
|
||||
// ? hasBreaklineBefore(comment)
|
||||
// ? hardline
|
||||
// : line
|
||||
// : " "
|
||||
// : hardline,
|
||||
return hasNewline(skipNewline(skipSpaces(end(comment)))) ? hardline : "";
|
||||
}
|
||||
|
||||
export function withComments<D extends Doc>(node: Node, printed: D, ignored?: Set<AnyComment>): D | Doc[] {
|
||||
__DEV__: assertPathAtNode("withComments", node);
|
||||
const { leading, trailing } = printCommentsSeparately(ignored);
|
||||
return leading || trailing ? [...leading!, printed, ...trailing!] : printed;
|
||||
// return needsOuterParens(node) ? group(["(", indent([softline, parts]), softline, ")"]) : parts;
|
||||
// return parts;
|
||||
}
|
||||
export function getComments(node: Node, ...args: Parameters<typeof getCommentTestFunction>): AnyComment[] {
|
||||
__DEV__: Narrow<Node & { comments?: AnyComment[] }>(node);
|
||||
// if (!node || !node.comments) return [];
|
||||
// if (args.length === 0) return node.comments;
|
||||
// return args.length > 0 ? node.comments.filter(getCommentTestFunction(...args)) : node.comments;
|
||||
return node && node.comments //
|
||||
? args.length > 0
|
||||
? node.comments.filter(getCommentTestFunction(...args))
|
||||
: node.comments
|
||||
: [];
|
||||
}
|
||||
|
||||
export function getFirstComment(node: Node, flags: CF, fn?: (comment: AnyComment) => boolean): AnyComment | undefined {
|
||||
const r = getComments(node, flags | CF.First, fn);
|
||||
return r.length === 0 ? undefined : r[0];
|
||||
}
|
||||
|
||||
export function escapeComments(flags: number, fn?: (comment: AnyComment) => boolean) {
|
||||
const comments = getAllComments().filter(getCommentTestFunction(flags, fn)) as AnyComment[];
|
||||
comments.forEach(setDidPrintComment);
|
||||
return new Set(comments);
|
||||
}
|
||||
|
||||
/** Bit flags for selecting comments in getComments / hasComment / escapeComments. */
export const enum CF {
	/** Attached before its node. */
	Leading = 1 << 1,
	/** Attached after its node. */
	Trailing = 1 << 2,
	/** Neither leading nor trailing (attached inside an empty container). */
	Dangling = 1 << 3,
	/** Block-style comment. */
	Block = 1 << 4,
	/** Line-style comment. */
	Line = 1 << 5,
	/** prettier-ignore comment or #[rustfmt::skip] attribute. */
	PrettierIgnore = 1 << 6,
	/** Only match the first comment in the list. */
	First = 1 << 7,
	/** Only match the last comment in the list. */
	Last = 1 << 8,
}
|
||||
export function isPrettierIgnoreComment(comment: AnyComment) {
|
||||
return is_Comment(comment) && /^\s*prettier-ignore\s*/.test(comment.value) && !comment.unignore;
|
||||
}
|
||||
export function isPrettierIgnoreAttribute(node: Node): node is MutatedAttribute {
|
||||
return is_Attribute(node) && /^\s*rustfmt::skip\s*$/.test(node.value);
|
||||
}
|
||||
function getCommentTestFunction(flags: CF, fn?: (comment: AnyComment) => boolean) {
|
||||
return function (comment: AnyComment, index: number, comments: AnyComment[]) {
|
||||
__DEV__: Narrow<number>(flags), assert(handled(comment));
|
||||
return !(
|
||||
(flags & CF.Leading && !isLeading(comment)) ||
|
||||
(flags & CF.Trailing && !isTrailing(comment)) ||
|
||||
(flags & CF.Dangling && !isDangling(comment)) ||
|
||||
(flags & CF.Block && !is_BlockCommentKind(comment)) ||
|
||||
(flags & CF.Line && !is_LineCommentKind(comment)) ||
|
||||
(flags & CF.First && index !== 0) ||
|
||||
(flags & CF.Last && !iLast(index, comments)) ||
|
||||
(flags & CF.PrettierIgnore && !(isPrettierIgnoreComment(comment) || isPrettierIgnoreAttribute(comment))) ||
|
||||
(fn && !fn(comment))
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
export function hasComment(node: Node, flags: number = 0, fn?: (comment: AnyComment) => boolean) {
|
||||
if ("comments" in node && node.comments!.length > 0) {
|
||||
return flags || fn ? (node.comments as AnyComment[]).some(getCommentTestFunction(flags, fn)) : true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
export function hasNewlineInRange(leftIndex: number, rightIndex: number) {
|
||||
__DEV__: assert(leftIndex <= rightIndex);
|
||||
const text = getContext().options.originalText;
|
||||
for (var i = leftIndex; i < rightIndex; ++i) if (text.charCodeAt(i) === 10) return true;
|
||||
return false;
|
||||
}
|
||||
export function isNextLineEmpty(node: Node) {
	// True when the line after the node's end is blank (delegates to the
	// index-based variant starting right past the node).
	return isNextLineEmptyAfterIndex(end(node));
}
|
||||
export function isNextLineEmptyAfterIndex(index: number | false) {
	// Fixed-point scan: repeatedly skip trailing same-line content (separators,
	// block comments, spaces, parens) until the cursor stops moving, then step
	// over an optional line comment and the line break itself. The line after
	// the node is "empty" when another newline immediately follows.
	let oldIdx: number | false = -1;
	let idx: number | false = index;
	while (idx !== oldIdx) {
		oldIdx = idx;
		idx = skipToLineEnd(idx);
		idx = skipBlockComment(idx);
		idx = skipSpaces(idx);
		idx = skipParens(idx);
	}
	idx = skipLineComment(idx);
	idx = skipParens(idx);
	idx = skipNewline(idx);
	idx = skipParens(idx);
	return idx !== false && hasNewline(idx);
}
|
||||
export function hasNewline(index: number | false, backwards = false) {
|
||||
if (index === false) return false;
|
||||
const i = skipSpaces(index, backwards);
|
||||
return i !== false && i !== skipNewline(i, backwards);
|
||||
}
|
||||
function skipLineComment(index: number | false) {
|
||||
if (index === false) return false;
|
||||
const { commentSpans, originalText } = getContext().options;
|
||||
if (commentSpans.has(index) && originalText.charCodeAt(index + 1) === 47 /** "/" */)
|
||||
return skipEverythingButNewLine(commentSpans.get(index)!);
|
||||
return index;
|
||||
}
|
||||
function skipBlockComment(index: number | false) {
|
||||
if (index === false) return false;
|
||||
const { commentSpans, originalText } = getContext().options;
|
||||
if (commentSpans.has(index) && originalText.charCodeAt(index + 1) === 42 /** "*" */) return commentSpans.get(index)!;
|
||||
return index;
|
||||
}
|
||||
// Character-class cursor skippers over options.originalText:
//   skipSpaces               — horizontal whitespace (space, tab)
//   skipToLineEnd            — whitespace plus "," and ";" separators
//   skipEverythingButNewLine — any char except "\r"/"\n"
// Each returns the first index whose character does NOT match (scanning
// forwards or backwards), the out-of-range sentinel (-1 / text.length) when
// the scan runs off the text, or false when given false.
const [skipSpaces, skipToLineEnd, skipEverythingButNewLine] = [/[ \t]/, /[,; \t]/, /[^\r\n]/].map(function (re) {
	return function (index: number | false, backwards = false) {
		if (index === false) return false;
		const { originalText: text } = getContext().options;
		let cursor = index;
		while (cursor >= 0 && cursor < text.length) {
			if (re.test(text.charAt(cursor))) backwards ? cursor-- : cursor++;
			else return cursor;
		}
		// Ran off either end of the text.
		return cursor === -1 || cursor === text.length ? cursor : false;
	};
});
|
||||
|
||||
function skipNewline(index: number | false, backwards = false) {
|
||||
if (index === false) return false;
|
||||
const { originalText } = getContext().options;
|
||||
const atIndex = originalText.charCodeAt(index);
|
||||
if (backwards) {
|
||||
if (originalText.charCodeAt(index - 1) === 13 && atIndex === 10) return index - 2;
|
||||
if (atIndex === 10) return index - 1;
|
||||
} else {
|
||||
if (atIndex === 13 && originalText.charCodeAt(index + 1) === 10) return index + 2;
|
||||
if (atIndex === 10) return index + 1;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
|
||||
function skipParens(index: number | false, backwards = false) {
	// Currently a no-op placeholder: the parenthesis-skipping implementation
	// below is disabled (parensPositions bookkeeping is not maintained).
	return index;
	// if (index === false) return false;
	// const { parensPositions } = getContext().options;
	// while (parensPositions.has(index)) backwards ? index-- : index++;
	// return index;
}
|
||||
|
||||
export function getNextNonSpaceNonCommentCharacterIndex(node: Node) {
	// Index of the first character after the node that is not whitespace,
	// a comment, or a line break.
	return getNextNonSpaceNonCommentCharacterIndexWithStartIndex(end(node));
}
|
||||
function getNextNonSpaceNonCommentCharacterIndexWithStartIndex(i: number) {
	// Fixed-point scan: keep skipping spaces, comments, newlines and (disabled)
	// parens until the cursor stops moving.
	let oldIdx = -1;
	let nextIdx = i;
	while (nextIdx !== oldIdx) {
		oldIdx = nextIdx;
		nextIdx = skipSpaces(nextIdx) as number;
		nextIdx = skipBlockComment(nextIdx) as number;
		nextIdx = skipLineComment(nextIdx) as number;
		nextIdx = skipNewline(nextIdx) as number;
		nextIdx = skipParens(nextIdx) as number;
	}
	return nextIdx;
}
|
||||
export function getNextNonSpaceNonCommentCharacter(node: Node) {
	// Character form of the index helper above ("" when the scan hits EOF).
	return getContext().options.originalText.charAt(getNextNonSpaceNonCommentCharacterIndex(node));
}
|
||||
|
||||
/**
 * Inputs handed to the comment-attachment handlers (mirrors prettier's
 * comment context): the comment plus its nearest AST neighbors.
 */
interface CommentContext {
	comment: AnyComment;
	/** Closest node ending before the comment, if any. */
	precedingNode: Node | undefined;
	/** Innermost node whose span contains the comment, if any. */
	enclosingNode: Node | undefined;
	/** Closest node starting after the comment, if any. */
	followingNode: Node | undefined;
	/** Full original source text. */
	text: string;
	options: CustomOptions;
	/** Root node of the parsed file. */
	ast: Node;
	isLastComment: boolean;
}
|
||||
|
||||
function handled(comment: AnyComment) {
	// A "printed" property on the comment marks it as already attached to a
	// node (presumably set by the add*Comment helpers — confirm there).
	return "printed" in comment;
}
|
||||
// Shared comment-attachment pipeline, run for every placement kind:
//  1. normalize the context (default the enclosing node to the program root;
//     for body/cases containers, drop neighbors on the wrong side of the body
//     and attach directly to the container when the comment sits outside it);
//  2. run the specialized handlers in priority order, stopping at the first
//     one that attaches the comment;
//  3. fall back to placement-based attachment (own-line prefers the following
//     node, end-of-line prefers the preceding node).
// Returns true once the comment is attached; false leaves it to the caller.
function handleCommon(ctx: CommentContext): boolean {
	{
		const { comment, precedingNode, enclosingNode, followingNode } = ctx;
		if (!enclosingNode) {
			// No enclosing node: treat the whole program as the container.
			ctx.enclosingNode = ctx.comment.loc.src.program;
		} else if (enclosingNode && is_NodeWithBodyOrCases(enclosingNode)) {
			const body = getBodyOrCases(enclosingNode);
			if (body) {
				// A label is not a real neighbor for attachment purposes.
				if (is_ExpressionWithBodyOrCases(enclosingNode) && enclosingNode.label) {
					if (ctx.precedingNode === enclosingNode.label) {
						ctx.precedingNode = undefined;
					}
					if (followingNode === enclosingNode.label) {
						ctx.followingNode = undefined;
					}
				}
				if (comment.loc.isBefore(body)) {
					// Comment sits between the container's start and its body.
					if (followingNode && body.loc.contains(followingNode)) {
						ctx.followingNode = undefined;
					}
					if (!ctx.precedingNode && !ctx.followingNode) {
						addLeadingComment(enclosingNode, comment);
						return true;
					}
				} else if (comment.loc.isAfter(body)) {
					// Comment sits between the body's end and the container's end.
					if (precedingNode && body.loc.contains(precedingNode)) {
						ctx.precedingNode = undefined;
					}
					if (!ctx.precedingNode && !ctx.followingNode) {
						addTrailingComment(enclosingNode, comment);
						return true;
					}
				} else if (body.loc.contains(comment)) {
					// Comment is inside the body: neighbors outside it don't count.
					if (precedingNode && !body.loc.contains(precedingNode)) {
						ctx.precedingNode = undefined;
					}
					if (followingNode && !body.loc.contains(followingNode)) {
						ctx.followingNode = undefined;
					}
				}
			}
		}
	}
	// Specialized handlers, in priority order; each either attaches the
	// comment (observable via handled()) or leaves ctx untouched.
	for (const fn of [
		handleMixedInOuterAttributeComments,
		handleAttributeComments,
		handleDanglingComments,
		handleFunctionComments,
		handleMacroRuleComments,
		handleStructLiteralComments,
		handleVariableDeclaratorComments,
		handleIfBlockExpressionComments,
		handleMemberExpressionComments,
		handleStatementComments,
		handleFlowControlComments,
		handleBadComments,
	]) {
		fn(ctx);
		if (handled(ctx.comment)) {
			// console.log(ctx.comment.loc.url(), fn.name);
			return true;
		}
	}

	const { precedingNode, followingNode, comment } = ctx;

	// Placement-based fallback.
	if (isStartOfLine(comment)) {
		// Own-line comments prefer to lead what follows them.
		if (followingNode) {
			addLeadingComment(followingNode, comment);
		} else if (precedingNode) {
			addTrailingComment(precedingNode, comment);
		} else {
			exit.never(ctx);
		}
	} else if (isEndOfLine(comment)) {
		// End-of-line comments prefer to trail what precedes them.
		if (precedingNode) {
			addTrailingComment(precedingNode, comment);
		} else if (followingNode) {
			addLeadingComment(followingNode, comment);
		} else {
			exit.never(ctx);
		}
	} else {
		// "Remaining" comments between two nodes are ambiguous: punt.
		if (precedingNode && followingNode) {
			return false;
		} else if (precedingNode) {
			addTrailingComment(precedingNode, comment);
		} else if (followingNode) {
			addLeadingComment(followingNode, comment);
		} else {
			exit.never(ctx);
		}
	}
	return handled(ctx.comment);
}
|
||||
export function handleOwnLineComment(ctx: CommentContext) {
	// Comments alone on their own line use the shared attachment pipeline.
	return handleCommon(ctx);
}
|
||||
export function handleEndOfLineComment(ctx: CommentContext) {
	const { precedingNode, enclosingNode, comment } = ctx;
	if (
		// handleCallExpressionComments: a comment right after the callee (or the
		// last type argument) of a non-empty call leads the first argument.
		precedingNode &&
		enclosingNode &&
		is_CallExpression_or_CallLikeMacroInvocation(enclosingNode) &&
		enclosingNode.arguments.length > 0 &&
		precedingNode === (enclosingNode.typeArguments ? last_of(enclosingNode.typeArguments) : enclosingNode.callee)
	) {
		addLeadingComment(enclosingNode.arguments[0], comment);
		return true;
	} else if (
		// handlePropertyComments: inside a struct-literal property the comment
		// leads the property itself.
		enclosingNode &&
		is_StructLiteralProperty(enclosingNode)
	) {
		addLeadingComment(enclosingNode, comment);
		return true;
	} else {
		return handleCommon(ctx);
	}
}
|
||||
|
||||
export function handleRemainingComment(ctx: CommentContext) {
	// Comments that are neither own-line nor end-of-line use the shared pipeline.
	return handleCommon(ctx);
}
|
||||
|
||||
function handleStructLiteralComments({ enclosingNode, followingNode, comment }: CommentContext) {
|
||||
if (enclosingNode && is_StructLiteralPropertySpread(enclosingNode) && followingNode === enclosingNode.expression) {
|
||||
addLeadingComment(enclosingNode, comment);
|
||||
}
|
||||
}
|
||||
|
||||
// Inside a `let`-like declaration or a reassignment, a comment preceding the
// value leads that value — always for block comments, and for line comments
// only when the value is a multi-element construct (struct/tuple/array
// literal, pattern, or sized-array type) that will print its own braces.
function handleVariableDeclaratorComments({ enclosingNode, followingNode, comment }: CommentContext) {
	if (
		enclosingNode &&
		(is_xVariableEqualishLike(enclosingNode) || is_ReassignmentNode(enclosingNode)) &&
		followingNode &&
		(is_BlockCommentKind(comment) ||
			nisAnyOf(followingNode, [
				NodeType.StructLiteral,
				NodeType.StructPattern,
				NodeType.TupleLiteral,
				NodeType.TypeTuple,
				NodeType.TuplePattern,
				NodeType.ArrayLiteral,
				NodeType.ArrayPattern,
				NodeType.SizedArrayLiteral,
				NodeType.TypeSizedArray,
			]))
	) {
		addLeadingComment(followingNode, comment);
	}
}
|
||||
|
||||
// Handles comments interleaved with a node's outer `#[attr]` attributes,
// i.e. comments that end before the node's own start.
function handleMixedInOuterAttributeComments({ precedingNode, enclosingNode, followingNode, comment }: CommentContext) {
	if (enclosingNode && hasOuterAttributes(enclosingNode) && end(comment) <= ownStart(enclosingNode)) {
		// A prettier-ignore among the attributes targets the attributed node.
		if (isPrettierIgnoreComment(comment) || isPrettierIgnoreAttribute(comment)) {
			setPrettierIgnoreTarget(enclosingNode, comment);
		}
		if (isEndOfLine(comment)) {
			__DEV__: assert(!!precedingNode && is_Attribute(precedingNode), "", precedingNode);
			if (shouldPrintOuterAttributesAbove(enclosingNode)) {
				// #[attr] // comment
				// node
				addTrailingComment(precedingNode, comment);
			} else {
				// #[attr] /* comment */ node
				addLeadingComment(followingNode || enclosingNode, comment);
			}
		} else {
			// __DEV__: assert(isStartOfLine(comment));
			// Own-line comment between attributes: lead the next attribute when
			// one follows before the node; otherwise trail the previous one, or
			// lead the node itself.
			if (followingNode && end(followingNode) <= ownStart(enclosingNode)) {
				addLeadingComment(followingNode, comment);
			} else if (precedingNode && enclosingNode.loc.contains(precedingNode)) {
				addTrailingComment(precedingNode, comment);
			} else {
				addLeadingComment(enclosingNode, comment);
			}
		}
	}
}
|
||||
// Handles the case where the "comment" is itself an attribute or doc comment
// (inner `#![...]`/`//!` or outer) that prettier treats as a comment node.
function handleAttributeComments({ precedingNode, enclosingNode, followingNode, comment, ast }: CommentContext) {
	if (is_AttributeOrDocComment(comment)) {
		if (
			comment.inner &&
			enclosingNode &&
			is_FunctionDeclaration(enclosingNode) &&
			(!followingNode || !is_StatementNode(followingNode)) &&
			(!precedingNode || !is_StatementNode(precedingNode))
		) {
			// Inner attribute at the top of a function body.
			if (enclosingNode.body) {
				if (canAttachCommentInLocArray(enclosingNode.body)) {
					// Empty (or comment-only) body: dangle on the function.
					addDanglingComment(enclosingNode, comment, DCM["body"]);
				} else {
					addLeadingComment(enclosingNode.body[0], comment);
				}
			} else {
				addLeadingComment(enclosingNode, comment);
			}
		} else {
			// if (comment.loc.url().startsWith("tests/samples/macro/attr.rs") && getContext().options.danglingAttributes.includes(comment)) {
			// 	// debugger;
			// 	console.log({
			// 		comment: comment.loc.url(),
			// 		precedingNode: precedingNode?.loc.url(),
			// 		enclosingNode: enclosingNode?.loc.url(),
			// 		followingNode: followingNode?.loc.url(),
			// 	});
			// }
			if (followingNode) {
				addLeadingComment(followingNode, comment);
			} else if (enclosingNode) {
				// No following node: dangle on the first DCM slot the enclosing
				// node actually has.
				for (var key in DCM)
					if (key in enclosingNode) {
						addDanglingComment(enclosingNode, comment, key as DCM);
						return;
					}
			} else {
				// Nothing to attach to: dangle on the AST root.
				addDanglingComment(ast, comment, DCM["body"]);
			}
		}
	}
}
|
||||
|
||||
function handleBadComments({ precedingNode, enclosingNode, followingNode, ast, comment }: CommentContext) {
|
||||
if (!enclosingNode) {
|
||||
// console.log(comment.loc.url());
|
||||
if (followingNode) {
|
||||
addLeadingComment(followingNode, comment);
|
||||
} else if (precedingNode) {
|
||||
addTrailingComment(precedingNode, comment);
|
||||
} else {
|
||||
addDanglingComment(enclosingNode || ast, comment, DCM["body"]);
|
||||
}
|
||||
} else if (!precedingNode && !followingNode) {
|
||||
if (enclosingNode && enclosingNode !== ast) {
|
||||
addLeadingComment(enclosingNode, comment);
|
||||
} else {
|
||||
addDanglingComment(ast, comment, DCM["body"]);
|
||||
}
|
||||
}
|
||||
}
|
||||
function is_ABI_Comment({ precedingNode, enclosingNode, comment }: CommentContext) {
|
||||
return (
|
||||
is_CommentOrDocComment(comment) &&
|
||||
((precedingNode && is_ExternSpecifier(precedingNode)) || (enclosingNode && is_ExternSpecifier(enclosingNode)))
|
||||
);
|
||||
}
|
||||
function handleFlowControlComments({ precedingNode, enclosingNode, followingNode, comment }: CommentContext) {
|
||||
if (enclosingNode && is_FlowControlExpression(enclosingNode)) {
|
||||
if (!precedingNode && (isOwnLine(comment) || isEndOfLine(comment)) && !followingNode) {
|
||||
addLeadingComment(enclosingNode, comment);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Attaches comments positioned inside a function's "header" region
// (before the name, between generics and parameters, between the parameter
// list and the body, or inside the parameter list).
function handleFunctionComments(ctx: CommentContext) {
	const { precedingNode, enclosingNode, followingNode, comment } = ctx;
	if (enclosingNode && is_FunctionNode(enclosingNode)) {
		if (
			// Before the name/generics, or between generics and parameters:
			// lead the declaration itself (unless it is an ABI comment, which
			// belongs to the extern specifier).
			is_FunctionDeclaration(enclosingNode) &&
			((!is_ABI_Comment(ctx) && comment.loc.isBefore(enclosingNode.generics || enclosingNode.id)) ||
				(enclosingNode.generics && comment.loc.isBetween(enclosingNode.generics, enclosingNode.parameters)))
		) {
			addLeadingComment(enclosingNode, comment);
		} else if (
			// Between the parameter list and the body (no return type in the
			// way): push the comment into the block / closure expression.
			!enclosingNode.returnType &&
			comment.loc.isBetween(
				enclosingNode.parameters,
				is_FunctionDeclaration(enclosingNode) ? enclosingNode.body! : enclosingNode.expression
			)
		) {
			if (is_FunctionDeclaration(enclosingNode)) {
				addCommentToBlock(enclosingNode, comment);
			} else {
				addLeadingComment(enclosingNode.expression, comment);
			}
		} else if (
			// Inside the parameter list, after the last parameter: trail it.
			precedingNode && //
			enclosingNode.parameters.loc.contains(comment)
		) {
			if (precedingNode === getLastParameter(enclosingNode)) {
				addTrailingComment(precedingNode, comment);
			}
		} else if (
			// Own-line comment after the whole signature (parameters, where
			// bounds, return type) that directly precedes the first body
			// statement / the closure expression: lead that node.
			followingNode &&
			isStartOfLine(comment) &&
			comment.loc.isAfter(enclosingNode.parameters) &&
			(!is_FunctionDeclaration(enclosingNode) || !enclosingNode.whereBounds || comment.loc.isAfter(enclosingNode.whereBounds!)) &&
			(!enclosingNode.returnType || comment.loc.isAfter(enclosingNode.returnType)) &&
			followingNode === (is_FunctionDeclaration(enclosingNode) ? enclosingNode.body?.[0] : enclosingNode.expression)
		) {
			addLeadingComment(followingNode, comment);
		}
	}
}
|
||||
// Inside a macro_rules! rule, comments at the edges of the matcher/transform
// token streams attach inward: the first node of the transform gets leading
// comments, the last node of the matcher gets trailing comments.
function handleMacroRuleComments(ctx: CommentContext) {
	const { precedingNode, enclosingNode, followingNode, comment } = ctx;
	if (enclosingNode && is_MacroRule(enclosingNode)) {
		if (enclosingNode.transform.loc.contains(comment)) {
			__DEV__: assert(enclosingNode.transform.length > 0);
			// Comment before the transform's first token: lead what follows.
			if (!precedingNode || !enclosingNode.transform.loc.contains(precedingNode)) {
				__DEV__: assert(!!followingNode && enclosingNode.transform.loc.contains(followingNode));
				addLeadingComment(followingNode, comment);
			}
		} else if (enclosingNode.match.loc.contains(comment)) {
			__DEV__: assert(enclosingNode.match.length > 0);
			// Comment after the matcher's last token: trail what precedes.
			if (!followingNode || !enclosingNode.match.loc.contains(followingNode)) {
				__DEV__: assert(!!precedingNode && enclosingNode.match.loc.contains(precedingNode));
				addTrailingComment(precedingNode!, comment);
			}
		}
	}
}
|
||||
|
||||
function handleStatementComments(ctx: CommentContext) {
|
||||
const { precedingNode, comment } = ctx;
|
||||
if (isEndOfLine(comment) && precedingNode && (is_StatementNode(precedingNode) || precedingNode.loc.sliceText().endsWith(";"))) {
|
||||
addTrailingComment(precedingNode, comment);
|
||||
}
|
||||
}
|
||||
|
||||
function addCommentToBlock(block: NodeWithBodyOrCases, comment: AnyComment) {
|
||||
const body = getBodyOrCases(block);
|
||||
__DEV__: assert(!!body);
|
||||
if (body.length > 0) {
|
||||
addLeadingComment(body![0], comment);
|
||||
} else {
|
||||
addDanglingComment(block, comment, DCM["body"]);
|
||||
}
|
||||
}
|
||||
|
||||
// Attaches comments around the pieces of an if-expression: before the
// condition, between condition and body, or between the body and an else arm.
function handleIfBlockExpressionComments(ctx: CommentContext) {
	const { comment, enclosingNode } = ctx;
	if (enclosingNode && is_IfBlockExpression(enclosingNode)) {
		const { condition, body, else: else_ } = enclosingNode;
		if (comment.loc.isBefore(condition)) {
			addLeadingComment(condition, comment);
		} else if (comment.loc.isBetween(condition, body)) {
			addTrailingComment(condition, comment);
		} else if (else_ && comment.loc.isBetween(body, else_)) {
			// Comment between `}` and `else`: for `else if`, lead the next
			// condition; for a plain else block, push into that block.
			if (is_IfBlockExpression(else_)) {
				addLeadingComment(else_.condition, comment);
			} else {
				addCommentToBlock(else_, comment);
			}
		}
	}
}
|
||||
|
||||
function handleMemberExpressionComments({ comment, precedingNode, enclosingNode }: CommentContext) {
|
||||
if (enclosingNode && is_MemberAccessLike(enclosingNode)) {
|
||||
if (isStartOfLine(comment) || !precedingNode) addLeadingComment(enclosingNode, comment);
|
||||
else addTrailingComment(precedingNode, comment);
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
// Dangles the comment on the enclosing node when it falls inside one of the
// node's DCM container arrays and no element of that array can host it.
function handleDanglingComments({ comment, enclosingNode }: CommentContext) {
	if (enclosingNode) {
		// Probe each DCM slot name the enclosing node actually has.
		for (var key in DCM) {
			if (key in enclosingNode) {
				var arr: LocArray = enclosingNode[key];
				if (is_LocArray(arr) && canAttachCommentInLocArray(arr) && arr.loc.contains(comment)) {
					addDanglingComment(enclosingNode, comment, key as DCM);
					return;
				}
			}
		}
	}
}
|
||||
|
||||
function canAttachCommentInLocArray(arr: LocArray) {
|
||||
return arr.length === 0 || arr.every((node) => !canAttachComment(node));
|
||||
}
|
||||
|
||||
function isOwnLine(comment: AnyComment) {
	// Own-line placement AND a line break after it in the original source.
	return isStartOfLine(comment) && hasBreaklineAfter(comment);
}
|
||||
function isStartOfLine(comment: AnyComment) {
	// Placement assigned by prettier's comment classification.
	return comment.placement === "ownLine";
}
|
||||
function isEndOfLine(comment: AnyComment) {
	// Placement assigned by prettier's comment classification.
	return comment.placement === "endOfLine";
}
|
||||
export function isDangling(comment: AnyComment) {
	// Attached to a node but neither leading nor trailing it.
	__DEV__: assert(handled(comment));
	return !comment.leading && !comment.trailing;
}
|
||||
export function isLeading(comment: AnyComment) {
	// Attached before its node (and not also trailing).
	__DEV__: assert(handled(comment));
	return comment.leading && !comment.trailing;
}
|
||||
export function isTrailing(comment: AnyComment) {
	// Attached after its node (and not also leading).
	__DEV__: assert(handled(comment));
	return !comment.leading && comment.trailing;
}
|
||||
|
||||
// Renders a single comment/doc-comment node to a prettier Doc.
// Block comments whose every line starts with "*" are re-indented line by
// line; other block comments are reproduced verbatim (literalline preserves
// their internal layout); line comments are printed with trailing whitespace
// trimmed. Dangling comments append their post-comment blank-line handling.
export function print_comment(comment: CommentOrDocComment) {
	__DEV__: Narrow<PrettierCommentInfo>(comment);

	const doc = is_BlockCommentNode(comment)
		? isIndentableBlockComment(comment.value)
			? [
					// Force the comment onto its own line when it wasn't already
					// preceded by a break (unhandled or trailing comments).
					(!handled(comment) || isTrailing(comment)) && !hasBreaklineBefore(comment) ? hardline : "",
					getCommentStart(comment),
					// Re-align every continuation line under the opening "/*".
					...comment.value.split(/\n/g).map((line, i, a) =>
						i === 0 //
							? [line.trimEnd(), hardline]
							: !iLast(i, a)
							? [" " + line.trim(), hardline]
							: " " + line.trimStart()
					),
					"*/",
			  ]
			: [
					getCommentStart(comment), //
					join(literalline, comment.value.split(/\n/g)),
					"*/",
			  ]
		: [getCommentStart(comment), comment.value.trimEnd()];

	return handled(comment) && isDangling(comment) //
		? [doc, getPostLeadingComment(comment)]
		: doc;

	// Opening token: "/*" | "//" for plain comments; "/**" | "///" for outer
	// doc comments; "/*!" | "//!" for inner doc comments.
	function getCommentStart(comment: CommentOrDocComment) {
		return is_Comment(comment)
			? is_BlockCommentKind(comment)
				? "/*"
				: "//"
			: is_BlockCommentKind(comment)
			? isInner(comment)
				? "/*!"
				: "/**"
			: isInner(comment)
			? "//!"
			: "///";
	}
	// A block comment is indentable when it is multiline and every line
	// (including synthetic first/last) begins with optional spaces and "*".
	function isIndentableBlockComment(value: string) {
		const lines = `*${value}*`.split(/\n/g);
		return lines.length > 1 && lines.every((line) => /^\s*\*/.test(line));
	}
}
|
||||
@@ -1,282 +0,0 @@
|
||||
import {
|
||||
ForLtParametersBody,
|
||||
FunctionSpread,
|
||||
GenericParameterDeclaration,
|
||||
MaybeGenericArgsTarget,
|
||||
MissingNode,
|
||||
Node,
|
||||
NodeType,
|
||||
TypeBound,
|
||||
TypeBoundsConstaint,
|
||||
TypeCallArgument,
|
||||
TypeNamespaceTargetNoSelector,
|
||||
TypeNode,
|
||||
} from "jinx-rust";
|
||||
import {
|
||||
getAstPath,
|
||||
getOwnChildAstPath,
|
||||
is_BareTypeTraitBound,
|
||||
is_FunctionSpread,
|
||||
is_LetScrutinee,
|
||||
is_Literal,
|
||||
is_MissingNode,
|
||||
is_TypeBoundsStandaloneNode,
|
||||
is_TypeFunctionNode,
|
||||
is_TypeNode,
|
||||
is_VariableDeclarationNode,
|
||||
} from "jinx-rust/utils";
|
||||
import { exit, has_key_defined, last_of, spliceAll } from "../utils/common";
|
||||
import { canBreak } from "./external";
|
||||
import { getContext, getNode, getOptions, getPrintFn } from "./plugin";
|
||||
|
||||
// Re-entrancy bookkeeping for the Is*/Has* check factories below:
// DEPTH is the current check-nesting level, ANCESTRY the chain of nodes
// currently being checked (used by `print` to rebuild an ast path).
let DEPTH = 0;
const ANCESTRY: Node[] = [];
// A string is "short" when it fits in this fraction of the print width.
const LONE_SHORT_ARGUMENT_THRESHOLD_RATE = 0.25;
|
||||
|
||||
export function withCheckContext<R>(fn: () => R): R {
|
||||
if (0 === DEPTH) {
|
||||
return fn();
|
||||
} else {
|
||||
DEPTH = 0;
|
||||
const prev = spliceAll(ANCESTRY);
|
||||
try {
|
||||
return fn();
|
||||
} finally {
|
||||
DEPTH = ANCESTRY.push(...prev);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function is_short(str: string) {
|
||||
return str.length <= LONE_SHORT_ARGUMENT_THRESHOLD_RATE * getOptions().printWidth;
|
||||
}
|
||||
// Prints a node that is not on prettier's current path: rebuild the ast-path
// keys from the current node through ANCESTRY down to `target`, then call
// the print function at that reconstructed path.
function print(target: Node) {
	const current = getNode();
	const keys: (string | number)[] = [...getAstPath(ANCESTRY[0], getNode())];
	for (let i = 1; i < ANCESTRY.length; i++) keys.push(...getOwnChildAstPath(ANCESTRY[i - 1], ANCESTRY[i]));
	keys.push(...getOwnChildAstPath(last_of(ANCESTRY), target));
	try {
		return getContext().path.call(() => getPrintFn(target)(), ...keys);
	} catch (e) {
		// Debug aid: dump the path-reconstruction state before rethrowing.
		console.log({ current, target, keys, ANCESTRY });
		throw e;
	}
}
|
||||
|
||||
/**
 * Wraps an "is this node simple?" predicate with recursion bookkeeping:
 * - a re-entrant call on the node currently at the top of ANCESTRY runs the
 *   raw predicate directly (no extra push);
 * - at depth >= 2 the cheap isShortBasic fallback answers instead of
 *   recursing further;
 * - otherwise the node is pushed onto ANCESTRY for the duration of the call
 *   and popped in the finally block.
 */
function IsSimpleFunction<T extends Node>(fn: (node: T) => boolean): (node: T) => boolean {
	return function (node: T) {
		if (0 !== DEPTH && node === ANCESTRY[DEPTH - 1]) {
			return fn(node);
		}

		if (DEPTH >= 2) {
			return isShortBasic(node);
		}

		try {
			return fn((ANCESTRY[DEPTH++] = node) as any);
		} finally {
			ANCESTRY.length = --DEPTH;
		}
	} as any;
}
|
||||
|
||||
/**
 * Negative-polarity twin of IsSimpleFunction for "has complex X?" checks:
 * identical DEPTH/ANCESTRY bookkeeping, but the depth>=2 fallback answers
 * with the negation of isShortBasic ("not short" => "complex").
 */
function HasComplexFunction<T extends Node>(fn: (node: T) => boolean): (node: T) => boolean {
	return function (node: T) {
		if (0 !== DEPTH && node === ANCESTRY[DEPTH - 1]) {
			return fn(node);
		}

		if (DEPTH >= 2) {
			return !isShortBasic(node);
		}

		try {
			return fn((ANCESTRY[DEPTH++] = node) as any);
		} finally {
			ANCESTRY.length = --DEPTH;
		}
	} as any;
}
|
||||
|
||||
const isShortBasic = (node: Node) => {
|
||||
switch (node.nodeType) {
|
||||
case NodeType.MissingNode:
|
||||
return true;
|
||||
case NodeType.Identifier:
|
||||
case NodeType.Index:
|
||||
case NodeType.LtIdentifier:
|
||||
case NodeType.LbIdentifier:
|
||||
case NodeType.McIdentifier:
|
||||
return is_short(node.name);
|
||||
case NodeType.Literal:
|
||||
return is_short(node.value) && !/\n/.test(node.value);
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
// Structural "simple type" check: a type is simple when printing it flat is
// always acceptable (it cannot meaningfully break). Recursion is bounded by
// the IsSimpleFunction wrapper (falls back to isShortBasic at depth 2).
export const isSimpleType = IsSimpleFunction<FunctionSpread | TypeNode | MissingNode>((node): boolean => {
	switch (node.nodeType) {
		case NodeType.MissingNode:
		case NodeType.FunctionSpread:
			return true;
		case NodeType.MacroInvocation:
			// Macro output is opaque; never treat it as simple.
			return false;
		case NodeType.Identifier:
		case NodeType.TypeNever:
		case NodeType.TypeInferred:
			return true;
		case NodeType.TypePath:
			return isShortBasic(node.segment) && (!node.namespace || isSimpleType(node.namespace));
		case NodeType.TypeCall:
			return isSimpleType(node.typeCallee) && !hasComplexTypeArguments(node);
		case NodeType.ExpressionTypeSelector:
			return isSimpleType(node.typeTarget) && (!node.typeExpression || isSimpleType(node.typeExpression));
		case NodeType.TypeDynBounds:
			return !hasComplexTypeBounds(node);
		case NodeType.TypeImplBounds:
			return !hasComplexTypeBounds(node);
		case NodeType.TypeFnPointer: {
			const param = node.parameters[0];
			// fn pointers are simple with at most one simple, non-function
			// parameter, a short ABI, no complex lifetimes, and a simple return.
			return (
				(!node.extern || !node.extern.abi || isShortBasic(node.extern.abi)) &&
				!hasComplexLtParameters(node) &&
				(node.parameters.length === 0 ||
					(node.parameters.length === 1 &&
						(is_FunctionSpread(param) ||
							(!is_TypeFunctionNode(param.typeAnnotation) && isSimpleType(param.typeAnnotation))))) &&
				(!node.returnType || isSimpleType(node.returnType))
			);
		}
		case NodeType.TypeFunction:
			return isSimpleType(node.callee) && node.parameters.every(isSimpleType) && (!node.returnType || isSimpleType(node.returnType));
		case NodeType.TypeSizedArray:
			return isSimpleType(node.typeExpression) && isShortBasic(node.sizeExpression);
		case NodeType.TypeSlice:
			return isSimpleType(node.typeExpression);
		case NodeType.TypeTuple:
			// Unit tuple or a single-element tuple of a simple type.
			return node.items.length === 0 || (node.items.length === 1 && isSimpleType(node.items[0]));
		case NodeType.TypeReference:
		case NodeType.TypeDereferenceMut:
		case NodeType.TypeDereferenceConst:
		case NodeType.TypeParenthesized:
			// Wrappers are as simple as what they wrap.
			return isSimpleType(node.typeExpression);
		default:
			__DEV__: exit.never(node);
			return false;
	}
});
|
||||
|
||||
export const hasComplexTypeBounds = HasComplexFunction<Extract<Node, TypeBoundsConstaint>>((node) => {
|
||||
return !!node.typeBounds && node.typeBounds.length > 1 && !node.typeBounds.every(isSimpleTypeBound);
|
||||
});
|
||||
|
||||
// A type bound is simple when it is a lifetime, or a bare trait bound whose
// target is a plain identifier/path (no type arguments, no fn sugar args).
export const isSimpleTypeBound = (node: TypeBound): boolean => {
	switch (node.nodeType) {
		case NodeType.TypeParenthesized:
			return isSimpleTypeBound(node.typeExpression);
		// #Lifetime
		case NodeType.LtIdentifier:
		case NodeType.LtElided:
		case NodeType.LtStatic:
			return true;
		case NodeType.TypeTraitBound:
			return is_BareTypeTraitBound(node) && isSimpleTypeNamespaceTargetNoSelector(node.typeExpression);
		default:
			__DEV__: exit.never(node);
			return false;
	}
	// Identifier or identifier path only; any generic arguments or Fn-style
	// parameters make the target non-simple.
	function isSimpleTypeNamespaceTargetNoSelector(node: TypeNamespaceTargetNoSelector): boolean {
		switch (node.nodeType) {
			case NodeType.Identifier:
				return true;
			case NodeType.TypePath:
				return undefined === node.namespace || isSimpleTypeNamespaceTargetNoSelector(node.namespace);
			case NodeType.TypeCall:
				return false;
			case NodeType.TypeFunction:
				return isSimpleTypeNamespaceTargetNoSelector(node.callee) && node.parameters.length === 0 && !node.returnType;
			default:
				__DEV__: exit.never(node);
				return false;
		}
	}
};
|
||||
|
||||
// Simplicity check for a single generic argument: types defer to
// isSimpleType; lifetimes and literals are always simple; blocks never are.
const isSimpleTypeArgument = IsSimpleFunction<TypeCallArgument>((node) => {
	if (is_TypeNode(node)) {
		return isSimpleType(node);
	}
	switch (node.nodeType) {
		// #Lifetime
		case NodeType.LtIdentifier:
		case NodeType.LtElided:
		case NodeType.LtStatic:
		case NodeType.Literal:
			return true;
		case NodeType.MinusExpression:
			// Only a negated literal (e.g. -1) is simple.
			return is_Literal(node.expression);
		case NodeType.BlockExpression:
			return false; //willBreak(getPrintFn(node)("body"));
		case NodeType.TypeCallNamedArgument:
			return isSimpleType(node.typeExpression);
		case NodeType.TypeCallNamedBound:
			return isSimpleType(node.typeTarget) && !hasComplexTypeBounds(node);
		default:
			__DEV__: exit.never(node);
			return false;
	}
});
|
||||
|
||||
export const hasComplexTypeArguments = HasComplexFunction<Extract<Node, MaybeGenericArgsTarget>>((node) =>
|
||||
!node.typeArguments || node.typeArguments.length === 0
|
||||
? false
|
||||
: node.typeArguments.length === 1
|
||||
? (() => {
|
||||
const arg = node.typeArguments[0];
|
||||
return is_TypeBoundsStandaloneNode(arg) || canBreak(print(arg));
|
||||
})()
|
||||
: true
|
||||
);
|
||||
|
||||
export const hasComplexLtParameters = HasComplexFunction<Extract<Node, ForLtParametersBody>>((node) => {
|
||||
const ltParameters = node.ltParameters;
|
||||
if (!ltParameters || ltParameters.length === 0) {
|
||||
return false;
|
||||
}
|
||||
if (ltParameters.length === 1) {
|
||||
const arg = ltParameters[0];
|
||||
if (arg.ltBounds && arg.ltBounds.length > 1) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
|
||||
export const isShortGenericParameterDeclaration = IsSimpleFunction<GenericParameterDeclaration>((node) => {
|
||||
switch (node.nodeType) {
|
||||
case NodeType.GenericTypeParameterDeclaration:
|
||||
return !node.typeBounds && !node.typeDefault;
|
||||
case NodeType.ConstTypeParameterDeclaration:
|
||||
return (!node.typeAnnotation || is_MissingNode(node)) && !node.typeDefault;
|
||||
case NodeType.GenericLtParameterDeclaration:
|
||||
return !node.ltBounds;
|
||||
default:
|
||||
exit.never();
|
||||
}
|
||||
});
|
||||
|
||||
export const hasComplexGenerics = HasComplexFunction<Node>((node) => {
|
||||
return has_key_defined(node, "generics") && node.generics.length > 0 && !node.generics.every(isShortGenericParameterDeclaration);
|
||||
});
|
||||
|
||||
export const hasComplexTypeAnnotation = HasComplexFunction<Node>((node) => {
|
||||
if (is_VariableDeclarationNode(node) && !is_LetScrutinee(node)) {
|
||||
const { typeAnnotation } = node;
|
||||
return !!typeAnnotation && !is_MissingNode(typeAnnotation) && !isSimpleType(typeAnnotation);
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,126 +0,0 @@
|
||||
import { Attribute, AttributeOrDocComment, Comment, DocCommentAttribute, LocArray, MemberExpression, Node, SourceFile } from "jinx-rust";
|
||||
import { PickProps } from "jinx-rust/utils";
|
||||
import type { Doc, ParserOptions, Printer } from "prettier";
|
||||
import { doc } from "prettier";
|
||||
import { AssertTypesEq } from "../utils/common";
|
||||
|
||||
export type { Doc, ParserOptions, Plugin, Printer } from "prettier";
|
||||
|
||||
|
||||
// Re-export prettier's doc builder primitives under flat names for terse use
// throughout the printer.
export const {
	join,
	line,
	softline,
	hardline,
	literalline,
	group,
	conditionalGroup,
	fill,
	lineSuffix,
	lineSuffixBoundary,
	cursor,
	breakParent,
	ifBreak,
	trim,
	indent,
	indentIfBreak,
	align,
	addAlignmentToDoc,
	markAsRoot,
	dedentToRoot,
	dedent,
	hardlineWithoutBreakParent,
	literallineWithoutBreakParent,
	label,
} = doc.builders;
|
||||
|
||||
// Re-export prettier's doc traversal/inspection utilities.
export const {
	willBreak,
	traverseDoc,
	findInDoc,
	mapDoc,
	removeLines,
	stripTrailingHardline,
} = doc.utils;
|
||||
|
||||
// Fallback implementations for removed common in prettier 3
|
||||
export const isConcat = (doc: any): boolean => Array.isArray(doc);
|
||||
export const getDocParts = (doc: any): any[] => Array.isArray(doc) ? doc : [doc];
|
||||
export const propagateBreaks = (doc: any): any => doc;
|
||||
export const normalizeParts = (parts: any[]): any[] => parts.flat();
|
||||
export const normalizeDoc = (doc: any): any => doc;
|
||||
export const cleanDoc = (doc: any): any => doc;
|
||||
export const canBreak = (doc: any): boolean => {
|
||||
if (!doc) return false;
|
||||
if (typeof doc === 'string') return false;
|
||||
if (Array.isArray(doc)) return doc.some(canBreak);
|
||||
if (doc.type === 'group' || doc.type === 'fill') return true;
|
||||
return willBreak(doc);
|
||||
};
|
||||
|
||||
// Well-known symbol under which the full comment list is stashed on the options object.
export const Symbol_comments = Symbol.for("comments");
|
||||
|
||||
/**
 * Prettier's ParserOptions extended with the state this plugin threads
 * through parse → transform → print.
 */
export interface CustomOptions extends ParserOptions<Node> {
	// Every comment-like node (comments, attributes, doc-comments) collected for the file.
	[Symbol_comments]: AnyComment[];
	// Result of rs.parseFile over the original text.
	rsParsedFile: SourceFile;
	// Maps a comment's start offset to its end offset.
	commentSpans: Map<number, number>;
	printer: Printer<Node>;
	cursorNode: any;

	comments: Comment[];
	danglingAttributes: AttributeOrDocComment[];
	// NOTE(review): presumably member expressions identified as real method
	// calls during transform — confirm against transform_ast.
	actuallyMethodNodes: WeakSet<MemberExpression>;
}
|
||||
|
||||
export type NodeWithComments<T extends Node> = T & { comments: AnyComment[] };
// Comment-like nodes gain prettier's bookkeeping fields once attached.
export interface MutatedComment extends Comment, PrettierCommentInfo {}
export interface MutatedAttribute extends Attribute, PrettierCommentInfo {}
export interface MutatedDocComment extends DocCommentAttribute, PrettierCommentInfo {}
export type AnyComment = MutatedComment | MutatedAttribute | MutatedDocComment;

// Keys of node properties whose values are delimiter-wrapped LocArrays
// ("()", "[]", "{}", "<>") — i.e. lists a dangling comment can live in.
type keyofDelimitedArrayProps<T> = T extends never ? never : keyof PickProps<T, LocArray<any, "()" | "[]" | "{}" | "<>">>;

// Dev-only: keep DCM's keys in sync with the delimited-array prop names.
__DEV__: AssertTypesEq<keyof typeof DCM, keyofDelimitedArrayProps<Node>>();
|
||||
|
||||
// Dangling-comment markers: names the delimited list property a dangling
// comment is attached to (checked against the Node types above).
export enum DCM {
	"arguments" = "arguments",
	"parameters" = "parameters",
	"items" = "items",
	"properties" = "properties",
	"members" = "members",
	"body" = "body",
	"cases" = "cases",
	"typeArguments" = "typeArguments",
	"ltParameters" = "ltParameters",
	"generics" = "generics",
	"specifiers" = "specifiers",
	"rules" = "rules",
	"match" = "match",
	"transform" = "transform",
	"segments" = "segments",
}
|
||||
|
||||
// Bookkeeping fields prettier's comment machinery writes onto attached comments.
export interface PrettierCommentInfo {
	trailing: boolean;
	leading: boolean;
	unignore: boolean;
	// Set once the comment has been emitted (checked by devEndCheck).
	printed: boolean;
	placement: "ownLine" | "endOfLine" | "remaining";
	// nodeDescription?: any;
	// When dangling: which delimited list the comment belongs to.
	marker?: DCM;
}
|
||||
|
||||
// Minimal local typing of prettier's AstPath as this plugin uses it.
// `stack` alternates nodes with the property names/indices used to reach them.
export interface AstPath<T = Node> {
	stack: (Node | string | number)[];
	callParent<R>(callback: (path: this) => R, count?: number): R;
	getName(): PropertyKey | null;
	getValue(): T;
	getNode(count?: number): T | null;
	getParentNode(count?: number): T | null;

	match(...predicates: ((node: Node, name: string | null, number: number | null) => boolean)[]): boolean;

	call<R>(callback: (path: AstPath, index: number, value: any) => R, ...props: (string | number)[]): R;
	each(callback: (path: AstPath, index: number, value: any) => void, ...props: (string | number)[]): void;
	map<R>(callback: (path: AstPath, index: number, value: any) => R, ...props: (string | number)[]): R[];
}
|
||||
@@ -1,367 +0,0 @@
|
||||
import { AttributeOrComment, IfBlockExpression, Node, Program, rs } from "jinx-rust";
|
||||
import {
|
||||
ArrayProps,
|
||||
BoolProps,
|
||||
NodeProps,
|
||||
end,
|
||||
hasAttributes,
|
||||
insertNodes,
|
||||
is_Attribute,
|
||||
is_AttributeOrComment,
|
||||
is_BlockCommentKind,
|
||||
is_BlockCommentNode,
|
||||
is_Comment,
|
||||
is_DocCommentAttribute,
|
||||
is_ElseBlock,
|
||||
is_LineCommentNode,
|
||||
is_MacroInvocation,
|
||||
is_MacroRule,
|
||||
is_MissingNode,
|
||||
is_Node,
|
||||
is_PunctuationToken,
|
||||
is_UnionPattern,
|
||||
start,
|
||||
} from "jinx-rust/utils";
|
||||
import { getCommentChildNodes, isTransformed, transform_ast } from "../transform";
|
||||
import { Narrow, assert, color, each, exit, iLast, is_array, map_tagged_template, print_string } from "../utils/common";
|
||||
import {
|
||||
CF,
|
||||
escapeComments,
|
||||
getComments,
|
||||
handleEndOfLineComment,
|
||||
handleOwnLineComment,
|
||||
handleRemainingComment,
|
||||
hasBreaklineAfter,
|
||||
hasComment,
|
||||
isDangling,
|
||||
isPrettierIgnoreAttribute,
|
||||
setDidPrintComment,
|
||||
withComments,
|
||||
} from "./comments";
|
||||
import { withCheckContext } from "./complexity";
|
||||
import { isNoopExpressionStatement, maybeEmptyLine } from "./core";
|
||||
import { AstPath, CustomOptions, Doc, Plugin, Symbol_comments, group, hardline, indent, line, softline, ParserOptions } from "./external";
|
||||
import { printer } from "./printer";
|
||||
import { needsInnerParens, needsOuterSoftbreakParens, shouldPrintOuterAttributesAbove } from "./styling";
|
||||
|
||||
export function is_printing_macro() {
|
||||
return getContext().path.stack.some((node) => is_Node(node) && (is_MacroInvocation(node) || is_Attribute(node)));
|
||||
}
|
||||
|
||||
/**
 * Dev-only guard: fail loudly when a path-context helper is invoked while the
 * prettier path is positioned at a different node than the caller asserts.
 * The `__DEV__:` label is stripped in production builds.
 */
export function assertPathAtNode(name: string, node: Node, ...ctx: any[]) {
	__DEV__: if (getNode() !== node)
		exit(`Attempted to call ${name}() in wrong prettier path context`, { asserted: node, actual: getNode() }, ...ctx);
}
|
||||
|
||||
/**
 * Tagged-template doc builder that cancels to "" if ANY interpolated value is
 * falsy or an empty array — e.g. f` as ${print("local")}` vanishes entirely
 * when there is no local name.
 */
export function f(...args: [strings: TemplateStringsArray, ...values: Doc[]]) {
	let cancel = false;
	const res = map_tagged_template(args, (doc) => {
		// Note: template is still fully mapped; cancellation only affects the return.
		cancel ||= !doc || (is_array(doc) && doc.length === 0);
		return doc;
	});
	return cancel ? "" : res;
}
|
||||
|
||||
/** Tagged template: group one doc between delimiters with soft-broken indent, e.g. sg_single`(${x})`. */
export function sg_single(s: TemplateStringsArray, v_0: Doc) {
	return group([s[0], indent([softline, v_0]), softline, s[1]]);
}
/** Same, for two docs separated by the middle template chunk (breaks on `line`). */
export function sg_duo(s: TemplateStringsArray, v_0: Doc, v_1: Doc) {
	return group([s[0], indent([softline, v_0, s[1], line, v_1]), softline, s[2]]);
}
|
||||
|
||||
// Module-level print context, (re)assigned on every top-level printer entry
// and cleared afterwards; all path helpers below read from it.
let ctx: {
	path: AstPath;
	options: CustomOptions;
	print: (path?: AstPath | string | [] | undefined, args?: any) => Doc;
	args: any;
};
|
||||
|
||||
// Accessors over the module-level print context.
// Current node = top of the path stack.
export const getNode = () => ctx.path.stack[ctx.path.stack.length - 1] as Node;
export const stackIncludes = (x: Node | string | number) => ctx.path.stack.includes(x);
export const getContext = () => ctx;
export const getOptions = () => ctx.options;
export const getProgram = () => ctx.options.rsParsedFile.program;
export const getAllComments = () => ctx.options[Symbol_comments];
// Optionally assert the path is at `child` (dev builds) before reading its parent.
export const getParentNode = (child?: Node) => {
	__DEV__: if (child) assertPathAtNode("getParentNode", child);
	return ctx.path.getParentNode();
};
export const getGrandParentNode = () => ctx.path.getParentNode(1) as Node;
// Typed view of the shared print function for the node currently in context.
export const getPrintFn = <T extends Node>(forNode?: T | undefined): print<T> => {
	__DEV__: if (forNode) assertPathAtNode("getPrintFn", forNode);
	return print as print<T>;
};
|
||||
|
||||
// Read a property off the node currently in path context / test its truthiness.
const get = (property: keyof any) => getNode()[property];
const has = (property: keyof any) => !!get(property);
||||
|
||||
/** Descend the path into `node[key]` and run `fn` with the path positioned there. */
export function pathCall<T extends Node, K extends keyof NodeProps<T> & keyof T, R>(node: T, key: K, fn: (child: T[K]) => R): R {
	return ctx.path.call(() => fn(getNode() as any), key as any);
}
|
||||
|
||||
/**
 * Iterate the array property `node[key]`, running `fn` with the path
 * positioned at each element in turn. Dev builds assert the path is at `node`.
 */
export function pathCallEach<T extends Node, K extends AK<T>>(
	node: T,
	key: K, // @ts-expect-error
	fn: (child: NonNullable<T[K]>[number], index: number) => void
) {
	__DEV__: assertPathAtNode("", node); // @ts-expect-error
	ctx.path.each((_, i) => fn(getNode() as any, i), key);
}
|
||||
|
||||
/** Run `fn` with the path moved up to `parent` (dev builds verify we landed on it). */
export function pathCallAtParent<T extends Node, R>(parent: T, fn: (parent: T) => R): R {
	return ctx.path.callParent(() => {
		__DEV__: assertPathAtNode("pathCallParent", parent);
		return fn(parent);
	});
}
|
||||
export function pathCallParentOf<T extends Node, R>(child: Node, fn: (parent: T) => R): R {
|
||||
__DEV__: assertPathAtNode("pathCallParentOf", child);
|
||||
return ctx.path.callParent((p) => fn(getNode() as any));
|
||||
}
|
||||
|
||||
/**
 * Walk up through `else if` chains to the outermost if-expression, then run
 * `fn` with the path positioned there.
 */
export function pathCallTopMostIfBlockExpression<R>(node: IfBlockExpression, fn: (node: IfBlockExpression) => R): R {
	const parent = getParentNode(node)!;
	return is_ElseBlock(node, parent) ? pathCallAtParent(parent, (parent) => pathCallTopMostIfBlockExpression(parent, fn)) : fn(node);
}
|
||||
|
||||
/**
 * Core print dispatcher: no argument prints the current node; an array is a
 * path tuple handed straight to prettier; a property name prints that child
 * (mapping over it when it is an array), or "" when the property is absent.
 */
function print(property?: any, args?: any): Doc | Doc[] {
	if (!property) return ctx.print(undefined!, args);
	if (Array.isArray(property)) return ctx.print(property as any, args);
	const value = get(property);
	return !!value ? (Array.isArray(value) ? ctx.path.map(ctx.print, property) : ctx.print(property, args)) : "";
}
|
||||
|
||||
// Convenience helpers merged onto `print` (function + namespace pattern).
namespace print {
	/** Emit `res` (default: the property name plus a space) iff the flag property is truthy. */
	export function b(property: string, res = `${property} `): Doc {
		return has(property) ? res : "";
	}
	/** Map over an array property; `mapItem` receives the raw child, else each child is printed. */
	export function map(property: string, mapItem?: MapFn<any, any>): Doc[] {
		return !has(property) ? [] : ctx.path.map(mapItem ? (p, i, a) => mapItem(a[i], i, a) : () => ctx.print(), property);
	}
	/** Print an array property, interleaving `sep` and appending `trailingSep` after the last item. */
	export function join(property: string, sep: SepFn<any, any> | Doc, trailingSep: TrailingSepFn<any, any> | Doc = ""): Doc[] {
		return map_join(property, () => ctx.print(), sep, trailingSep);
	}
	/** Like join, but each item goes through `mapFn` first; separators may be docs or callbacks. */
	export function map_join(
		property: string,
		mapFn: MapFn<any, any>,
		sep: SepFn<any, any> | Doc,
		sepTrailing: TrailingSepFn<any, any> | Doc = ""
	) {
		const sepFn = typeof sep === "function" ? sep : () => sep;
		return map(property, (v, i, a) => [
			mapFn(v, i, a),
			// Last item gets the trailing separator; others get sep(current, next, prev).
			iLast(i, a as any)
				? typeof sepTrailing === "function"
					? sepTrailing(v)
					: sepTrailing
				: sepFn(v, a[i + 1], i === 0 ? undefined : a[i - 1]),
		]);
	}
}
|
||||
|
||||
// Callback shapes used by print.map / print.join over a node's array-valued
// properties, plus helpers extracting those array values (AV) and keys (AK).
// prettier-ignore
type SepFn<T extends Node = Node, K extends AK<T> = AK<T>> = <A extends AV<T, K>>(item: A[number], next_item: A[number], prev_item: A[number] | undefined) => Doc;
type MapFn<T extends Node = Node, K extends AK<T> = AK<T>> = <A extends AV<T, K>>(item: A[number], index: number, arr: A) => Doc;
type TrailingSepFn<T extends Node = Node, K extends AK<T> = AK<T>> = <A extends AV<T, K>>(item: A[number]) => Doc;
type AV<T extends Node, K extends keyof T> = Extract<NonNullable<T[K]>, ReadonlyArray<unknown>>;
type AK<T extends Node> = keyof ArrayProps<T> & keyof T;
// type AK<T extends Node> = keyof PickProps<T, {nodeType:number}|{nodeType:number}[]> & keyof T;
|
||||
|
||||
// Public typed surface of the `print` function+namespace, parameterized by
// the node type currently being printed (overloads narrow the return shape).
export interface print<T extends Node> {
	(property?: [], args?: any): Doc;
	(property?: [AK<T>, number], args?: any): Doc;
	(property?: AK<T>, args?: any): Doc[];
	// (property?: T extends {rules:{nodeType:number}|{nodeType:number}[]} ? "rules" : never, args?: any): Doc[];
	(property?: keyof NodeProps<T> & keyof T, args?: any): Doc;
	b(property: keyof BoolProps<T>, res?: string): Doc;
	map<K extends AK<T>>(property: K & keyof ArrayProps<T>, mapFn?: MapFn<T, K>): Doc[];
	join<K extends AK<T>>(property: K, sep: SepFn<T, K> | Doc, trailingSep?: TrailingSepFn<T, K> | Doc): Doc[];
	map_join<K extends AK<T>>(property: K, mapFn: MapFn<T, K>, sep: SepFn<T, K> | Doc, trailingSep?: TrailingSepFn<T, K> | Doc): Doc[];
}
|
||||
|
||||
/**
 * Prints the node at the current path position: dispatches to the per-NodeType
 * printer, then layers on (in order) inner parens, outer attributes, attached
 * comments, and outer soft-break parens. Also serves as printComment.
 */
function genericPrint() {
	return withCheckContext(() => {
		const node = getNode();
		__DEV__: assert(node.nodeType in printer);

		// prettier-ignore'd nodes are emitted verbatim from source.
		let printed: Doc = hasPrettierIgnore(node) //
			? node.loc.getOwnText()
			: printer[node.nodeType]!(print as any, node as never);

		const inner_parens = needsInnerParens(node);

		if (inner_parens) {
			printed = group(["(", printed, ")"]);
		}

		if (hasAttributes(node)) {
			const print_above = shouldPrintOuterAttributesAbove(node); /* || node.attributes.length > 1 */
			// Separator between attributes / after the last attribute: an empty-line-aware
			// hardline when printing above; otherwise hardline only after line comments
			// (or block comments followed by a newline), else a single space.
			printed = [
				...print.join(
					"attributes",
					(attr) =>
						print_above
							? maybeEmptyLine(attr)
							: is_LineCommentNode(attr) || (is_BlockCommentNode(attr) && hasBreaklineAfter(attr))
							? hardline
							: " ",
					(attr) =>
						print_above && is_DocCommentAttribute(attr)
							? maybeEmptyLine(attr)
							: print_above || is_LineCommentNode(attr) || (is_BlockCommentNode(attr) && hasBreaklineAfter(attr))
							? hardline
							: " "
				),
				printed,
			];
		}

		// Attach comments; the third argument names comments to ESCAPE (skip),
		// e.g. those living inside verbatim-printed macro/attribute source.
		printed = withComments(
			node,
			printed,
			hasPrettierIgnore(node) || ((is_Attribute(node) || is_MacroInvocation(node)) && !isTransformed(node))
				? escapeComments(0, (comment) => node.loc.ownContains(comment))
				: is_MacroRule(node)
				? escapeComments(0, (comment) => node.transform.loc.contains(comment))
				: is_UnionPattern(getParentNode() ?? ({ nodeType: 0 } as any))
				? new Set(getComments(node, CF.Leading | CF.Trailing, (comment) => !isDangling(comment)))
				: undefined
		);

		if (!inner_parens && needsOuterSoftbreakParens(node)) {
			printed = [group(["(", indent([softline, printed]), softline, ")"])];
		}

		return printed;
	});

	// A node is ignored when flagged directly, via a prettier-ignore comment,
	// or via a prettier-ignore attribute.
	function hasPrettierIgnore(node: Node) {
		return (
			(node as any).prettierIgnore ||
			hasComment(node, CF.PrettierIgnore) ||
			(hasAttributes(node) && node.attributes.some(isPrettierIgnoreAttribute))
		);
	}
}
|
||||
|
||||
export function canAttachComment(n: Node) {
|
||||
return !is_Comment(n) && !isNoopExpressionStatement(n) && !is_MissingNode(n) && !is_PunctuationToken(n);
|
||||
}
|
||||
|
||||
/** The prettier plugin definition: language metadata, Rust parser, and printer. */
export const plugin: Plugin<Node> = {
	languages: [
		{
			name: "Rust",
			aliases: ["rs"],
			parsers: ["rust"],
			extensions: [".rs", ".rs.in"],
			linguistLanguageId: 327,
			vscodeLanguageIds: ["rust"],
			tmScope: "source.rust",
			aceMode: "rust",
			codemirrorMode: "rust",
			codemirrorMimeType: "text/x-rustsrc",
		},
	],
	parsers: {
		"rust": {
			astFormat: "rust",
			locStart: start,
			locEnd: end,
			// Parses the file, runs the AST transform, and collects every
			// comment-like node so prettier's comment machinery can attach them.
			parse(code: string, options: ParserOptions<Node> & Partial<CustomOptions>) {
				const customOptions = options as CustomOptions;
				// Minimal ctx so helpers invoked during transform can read options.
				ctx = { options: customOptions } as any;

				customOptions.rsParsedFile = rs.parseFile((customOptions.originalText = code), { filepath: customOptions.filepath });

				customOptions.actuallyMethodNodes = new WeakSet();
				customOptions.danglingAttributes = [];
				customOptions.comments = [];

				transform_ast(customOptions);

				// Merge comments and dangling attributes into one position-ordered list.
				const comments: AttributeOrComment[] = [];
				insertNodes(comments, customOptions.comments);
				insertNodes(comments, customOptions.danglingAttributes);

				// @ts-expect-error
				customOptions.rsParsedFile.program.comments = comments;

				customOptions.commentSpans = new Map(comments.map((n) => [start(n), end(n)]));

				return customOptions.rsParsedFile.program;
			},
		},
	},
	printers: {
		"rust": {
			preprocess: (node: Node) => (node as Program).loc?.src || node,
			// Establishes/restores the module-level ctx around genericPrint:
			// fresh ctx at the root, args swap for nested prints carrying args.
			print(path, options, print, args) {
				if (path.stack.length === 1) {
					__DEV__: Narrow<CustomOptions>(options);
					ctx = { path, options, print: print as any, args };
					try {
						const printed = genericPrint();
						__DEV__: devEndCheck(printed);
						return printed;
					} finally {
						ctx = undefined!;
					}
				} else if (args || ctx.args) {
					const prev_args = ctx.args;
					try {
						ctx.args = args;
						return genericPrint();
					} finally {
						ctx.args = prev_args;
					}
				} else {
					return genericPrint();
				}
			},
			// Ignore handling is done inside genericPrint, not by prettier.
			hasPrettierIgnore: () => false,
			willPrintOwnComments: () => true,
			isBlockComment: (node: Node): boolean => {
				return is_AttributeOrComment(node) && is_BlockCommentKind(node as any);
			},
			canAttachComment: canAttachComment,
			getCommentChildNodes: getCommentChildNodes,
			printComment: genericPrint,
			handleComments: {
				// @ts-expect-error
				avoidAstMutation: true,
				ownLine: handleOwnLineComment,
				endOfLine: handleEndOfLineComment,
				remaining: handleRemainingComment,
			},
		},
	},
	options: {},
	defaultOptions: {
		// default prettier (2) -> rustfmt (4)
		tabWidth: 4,
		// default prettier (80) -> rustfmt (100)
		printWidth: 100,
	},
};
|
||||
|
||||
/**
 * Dev-only post-print audit: reports every collected comment that was never
 * printed (hard failure under TESTS_FORMAT_DEV, console warning otherwise),
 * then marks it printed so prettier does not also complain.
 */
function devEndCheck(printed: Doc) {
	let first = false;
	const comments = getAllComments();
	each(comments, (comment, index) => {
		if (!comment.printed) {
			// Print the header once, before the first offender.
			if (!first) (first = true), console.log(color.red(`Unprinted comments:`));
			const len = 40;
			const msg =
				color.magenta(
					(comment.marker ? `Dangling "${comment.marker}" ` : "") +
						(is_Attribute(comment) ? "Attribute " : is_DocCommentAttribute(comment) ? "DocCommentAttribute" : "Comment") +
						` ${index}/${comments.length}` +
						color.yellow(` ${print_string(comment.loc.sliceText(0, len))}${comment.loc.len() > len ? " ..." : ""}`)
				) + color.grey(`\n at ${comment.loc.url()}`);
			if (globalThis.TESTS_FORMAT_DEV) exit(msg);
			else console.log(msg);
			setDidPrintComment(comment);
		}
	});
}
|
||||
@@ -1,719 +0,0 @@
|
||||
import { DelimKind, Node, NodeType, NTMap } from "jinx-rust";
|
||||
import {
|
||||
getDelimChars,
|
||||
hasSuffix,
|
||||
is_ArrayOrTupleLiteral,
|
||||
is_BlockExpression,
|
||||
is_ClosureFunctionExpression,
|
||||
is_Identifier,
|
||||
is_IfBlockExpression,
|
||||
is_LiteralNumberLike,
|
||||
is_StructLiteral,
|
||||
start,
|
||||
} from "jinx-rust/utils";
|
||||
import {
|
||||
BlockLikeMacroInvocation,
|
||||
CallLikeMacroInvocation,
|
||||
is_BlockLikeMacroInvocation,
|
||||
is_CallLikeMacroInvocation,
|
||||
isTransformed,
|
||||
} from "../transform";
|
||||
import { exit } from "../utils/common";
|
||||
import { hasComment, print_comment } from "./comments";
|
||||
import { isSimpleType } from "./complexity";
|
||||
import {
|
||||
adjustClause,
|
||||
parenthesize_if_break,
|
||||
printAnnotatedPattern,
|
||||
printArrayLike,
|
||||
printArrowFunction,
|
||||
printAssignment,
|
||||
printBinaryishExpression,
|
||||
printBlockBody,
|
||||
printBodyOrCases,
|
||||
printCallArguments,
|
||||
printCallExpression,
|
||||
printCondition,
|
||||
printDanglingCommentsForInline,
|
||||
printDeclarationTypeBounds,
|
||||
printEnumBody,
|
||||
printFlowControlExpression,
|
||||
printGenerics_x_whereBounds,
|
||||
printIfBlock,
|
||||
printIfBlockCondition,
|
||||
printImplTraitForType,
|
||||
printLtBounds,
|
||||
printLtParameters,
|
||||
printMacroRules,
|
||||
printMaybeBlockBody,
|
||||
printMemberExpression,
|
||||
printNumber,
|
||||
printObject,
|
||||
printParametersAndReturnType,
|
||||
printRuleMatch,
|
||||
printRuleTransform,
|
||||
printTypeAnnotation,
|
||||
printTypeArguments,
|
||||
printTypeBounds,
|
||||
printUnaryExpression,
|
||||
printUnionPattern,
|
||||
} from "./core";
|
||||
import { DCM, Doc, group, hardline, ifBreak, indent, join, line, softline, willBreak } from "./external";
|
||||
import { f, getOptions, getParentNode, pathCall, sg_duo, sg_single, type print } from "./plugin";
|
||||
import { needsParens, stmtNeedsSemi } from "./styling";
|
||||
|
||||
// Shape of one per-NodeType printer entry: receives the typed print helper and the node.
type nPrint<T extends Node> = (print: print<T>, node: T) => Doc | never;
|
||||
|
||||
export const printer: { [K in NodeType]: nPrint<Extract<NTMap[K], Node>> } = {
|
||||
[NodeType.MissingNode](print, node) {
|
||||
return "";
|
||||
},
|
||||
[NodeType.SourceFile](print, node) {
|
||||
return [
|
||||
print.b("UTF8BOM", "\uFEFF"), //
|
||||
print("shebang"),
|
||||
print("program"),
|
||||
];
|
||||
},
|
||||
[NodeType.Shebang](print, node) {
|
||||
return [`#!${node.value}`, hardline];
|
||||
},
|
||||
[NodeType.Program](print, node) {
|
||||
return printBodyOrCases(print, node);
|
||||
},
|
||||
[NodeType.Snippet](print, node) {
|
||||
exit.never();
|
||||
},
|
||||
[NodeType.Identifier](print, node) {
|
||||
return node.name;
|
||||
},
|
||||
[NodeType.Index](print, node) {
|
||||
return node.name;
|
||||
},
|
||||
[NodeType.LbIdentifier](print, node) {
|
||||
return node.name;
|
||||
},
|
||||
[NodeType.McIdentifier](print, node) {
|
||||
return node.name;
|
||||
},
|
||||
[NodeType.LtIdentifier](print, node) {
|
||||
return node.name;
|
||||
},
|
||||
[NodeType.PunctuationToken](print, node) {
|
||||
return node.token;
|
||||
},
|
||||
[NodeType.DelimGroup](print, node) {
|
||||
return node.loc.getOwnText();
|
||||
},
|
||||
[NodeType.Literal](print, node) {
|
||||
let { value } = node;
|
||||
if (is_LiteralNumberLike(node)) value = printNumber(value);
|
||||
return hasSuffix(node) ? [value, print("suffix")] : value;
|
||||
},
|
||||
[NodeType.ItemPath](print, node) {
|
||||
return [print("namespace"), "::", print("segment")];
|
||||
},
|
||||
[NodeType.ExpressionPath](print, node) {
|
||||
return [print("namespace"), "::", print("segment")];
|
||||
},
|
||||
[NodeType.TypePath](print, node) {
|
||||
return [print("namespace"), "::", print("segment")];
|
||||
},
|
||||
[NodeType.Comment](print, node) {
|
||||
return print_comment(node);
|
||||
},
|
||||
[NodeType.DocCommentAttribute](print, node) {
|
||||
return print_comment(node);
|
||||
},
|
||||
[NodeType.Attribute](print, node) {
|
||||
return [
|
||||
node.inner ? "#![" : "#[",
|
||||
isTransformed(node)
|
||||
? [print("segments"), printDanglingCommentsForInline(node)] //
|
||||
: node.segments.loc.sliceText(1, -1).trim(),
|
||||
"]",
|
||||
];
|
||||
},
|
||||
[NodeType.MacroInvocation](print, node) {
|
||||
const hasCurlyBrackets = node.segments.dk === DelimKind["{}"];
|
||||
const delim = getDelimChars(node.segments);
|
||||
if (node.segments.length === 0) {
|
||||
return [print("callee"), "!", hasCurlyBrackets ? " " : "", delim.left, printDanglingCommentsForInline(node), delim.right];
|
||||
}
|
||||
if (isTransformed(node)) {
|
||||
if (is_CallLikeMacroInvocation(node)) {
|
||||
return [print("callee"), "!", printCallArguments(print as print<CallLikeMacroInvocation>, node)];
|
||||
}
|
||||
if (is_BlockLikeMacroInvocation(node)) {
|
||||
return [print("callee"), "!", " ", printBlockBody(print as print<BlockLikeMacroInvocation>, node)];
|
||||
}
|
||||
}
|
||||
let content = node.segments.loc.sliceText(1, -1);
|
||||
if (content.trim().length === 0) {
|
||||
content = "";
|
||||
} else if (!content.includes("\n")) {
|
||||
content = content.trim();
|
||||
if (hasCurlyBrackets) content = " " + content + " ";
|
||||
}
|
||||
return [print("callee"), "!", hasCurlyBrackets ? " " : "", delim.left, content, delim.right];
|
||||
},
|
||||
[NodeType.MacroRulesDeclaration](print, node) {
|
||||
return ["macro_rules! ", print("id"), printMacroRules(print, node)];
|
||||
},
|
||||
[NodeType.MacroRuleDeclaration](print, node) {
|
||||
return [printRuleMatch(print, node), " => ", printRuleTransform(print, node), ";"];
|
||||
},
|
||||
[NodeType.MacroDeclaration](print, node) {
|
||||
return [print("pub"), "macro ", print("id"), printMacroRules(print, node)];
|
||||
},
|
||||
[NodeType.MacroInlineRuleDeclaration](print, node) {
|
||||
return [printRuleMatch(print, node), " ", printRuleTransform(print, node)];
|
||||
},
|
||||
[NodeType.MacroGroup](print, node) {
|
||||
return node.loc.getOwnText();
|
||||
},
|
||||
[NodeType.MacroParameterDeclaration](print, node) {
|
||||
return [print("id"), ":", print("ty")];
|
||||
},
|
||||
[NodeType.PubSpecifier](print, node) {
|
||||
if (!node.location) return "pub ";
|
||||
if (is_Identifier(node.location)) {
|
||||
switch (node.location.name) {
|
||||
case "crate":
|
||||
if (start(node) === start(node.location)) {
|
||||
return "crate ";
|
||||
} else {
|
||||
return ["pub(", print("location"), ") "];
|
||||
}
|
||||
case "self":
|
||||
case "super":
|
||||
return ["pub(", print("location"), ") "];
|
||||
}
|
||||
}
|
||||
return ["pub(in ", print("location"), ") "];
|
||||
},
|
||||
[NodeType.ExternSpecifier](print, node) {
|
||||
return ["extern ", f`${print("abi")} `];
|
||||
},
|
||||
[NodeType.ExpressionStatement](print, node) {
|
||||
return [print("expression"), stmtNeedsSemi(node) ? ";" : ""];
|
||||
},
|
||||
[NodeType.UseStatement](print, node) {
|
||||
return [print("pub"), "use ", print("import"), ";"];
|
||||
},
|
||||
// `source::{a, b, c}` import list.
[NodeType.DestructuredImport](print, node) {
  // Empty braces may still carry dangling comments: `source::{ /* .. */ }`.
  if (node.specifiers.length === 0) return [print("source"), "::{", printDanglingCommentsForInline(node, DCM["specifiers"]), "}"];
  // `space` controls inner padding of the unbroken list (`{ a, b }` vs
  // `{a, b}`); dev test runs disable the padding.
  let space = true;
  __DEV__: if (globalThis.TESTS_FORMAT_DEV) space = false;
  return [
    print("source"),
    group([
      "::{",
      indent([space ? line : softline, join([",", line], print("specifiers")), ifBreak(",")]),
      space ? line : softline,
      "}",
    ]),
  ];
},
|
||||
// `source::*` glob import (bare `*` when there is no source prefix).
[NodeType.AmbientImport](print, node) {
  return f`${print("source")}::*` || "*";
},
// `source as _` — import used only for its trait implementations.
[NodeType.AnonymousImport](print, node) {
  return [print("source"), " as ", "_"];
},
// `source as local` (the rename is optional).
[NodeType.NamedImport](print, node) {
  return [print("source"), f` as ${print("local")}`];
},
// `pub extern crate name;`
[NodeType.ExternCrateStatement](print, node) {
  return [print("pub"), "extern crate ", print("import"), ";"];
},
|
||||
// `pub type Name<...>: bounds = Type;`
[NodeType.TypeAliasDeclaration](print, node) {
  return [
    print("pub"),
    "type",
    printAssignment(
      printGenerics_x_whereBounds(print, node, printDeclarationTypeBounds(print, node, ":")), //
      " =",
      "typeExpression"
    ),
    ";",
  ];
},
// `let pattern(: Type)? = expr (else { ... })?;`
[NodeType.LetVariableDeclaration](print, node) {
  return [
    "let ",
    printAssignment(
      printAnnotatedPattern(print, node), //
      " =",
      "expression"
    ),
    // let-else diverging block (only printed when present).
    f` else ${print("else")}`,
    ";",
  ];
},
// `pub const NAME: Type = expr;`
[NodeType.ConstVariableDeclaration](print, node) {
  return [
    print("pub"),
    "const ",
    printAssignment(
      printAnnotatedPattern(print, node), //
      " =",
      "expression"
    ),
    ";",
  ];
},
// `pub static NAME: Type = expr;`
[NodeType.StaticVariableDeclaration](print, node) {
  return [
    print("pub"),
    "static ",
    printAssignment(
      printAnnotatedPattern(print, node), //
      " =",
      "expression"
    ),
    ";",
  ];
},
|
||||
// `pub unsafe mod name { ... }` or `pub mod name;`
[NodeType.ModuleDeclaration](print, node) {
  return [
    print("pub"), //
    print.b("unsafe"),
    "mod ",
    print("id"),
    printMaybeBlockBody(print, node),
  ];
},
// `pub unsafe extern "abi" { ... }`
[NodeType.ExternBlockDeclaration](print, node) {
  return [
    print("pub"), //
    print.b("unsafe"),
    "extern ",
    f`${print("abi")} `,
    printBlockBody(print, node),
  ];
},
// `pub const async unsafe extern "abi" fn name<...>(...) -> T { ... }`
// — qualifier order matches the Rust grammar.
[NodeType.FunctionDeclaration](print, node) {
  return [
    print("pub"),
    print.b("const"),
    print.b("async"),
    print.b("unsafe"),
    print("extern"),
    "fn",
    printGenerics_x_whereBounds(print, node, printParametersAndReturnType(node)),
    printMaybeBlockBody(print, node),
  ];
},
// Receiver parameter: `&'a mut self(: Type)?` variants.
[NodeType.FunctionSelfParameterDeclaration](print, node) {
  return group([print.b("ref", "&"), f`${print("lt")} `, print.b("mut"), "self", printTypeAnnotation(print, node)]);
},
// Ordinary `pattern: Type` parameter.
[NodeType.FunctionParameterDeclaration](print, node) {
  return group(printAnnotatedPattern(print, node));
},
// C-variadic `...` parameter.
[NodeType.FunctionSpread](print, node) {
  return "...";
},
|
||||
// `pub struct Name<...> { ... }`
[NodeType.StructDeclaration](print, node) {
  return [print("pub"), "struct", printGenerics_x_whereBounds(print, node, ""), printObject(print, node)];
},
// Named struct field: `pub name: Type`.
[NodeType.StructPropertyDeclaration](print, node) {
  return [print("pub"), print("id"), printTypeAnnotation(print, node)];
},
// Tuple struct: `pub struct Name<...>(...);`
[NodeType.TupleStructDeclaration](print, node) {
  return [print("pub"), "struct", printGenerics_x_whereBounds(print, node, printArrayLike(print, node)), ";"];
},
// Positional tuple-struct field: `pub Type`.
[NodeType.TupleStructItemDeclaration](print, node) {
  return [print("pub"), print("typeAnnotation")];
},
// `pub union Name<...> { ... }`
[NodeType.UnionDeclaration](print, node) {
  return [print("pub"), "union", printGenerics_x_whereBounds(print, node, ""), printObject(print, node)];
},
// `pub enum Name<...> { ... }`
[NodeType.EnumDeclaration](print, node) {
  return [print("pub"), "enum", printGenerics_x_whereBounds(print, node, ""), printEnumBody(print, node)];
},
|
||||
// Unit enum member with optional discriminant: `Name = value`.
[NodeType.EnumMemberDeclaration](print, node) {
  return [
    print("pub"),
    printAssignment(
      print("id"), //
      " =",
      "value"
    ),
  ];
},
// Tuple enum member: `Name(...) = value`.
[NodeType.EnumMemberTupleDeclaration](print, node) {
  return [
    print("pub"),
    printAssignment(
      [print("id"), printArrayLike(print, node)], //
      " =",
      "value"
    ),
  ];
},
// Struct enum member: `Name { ... } = value`.
[NodeType.EnumMemberStructDeclaration](print, node) {
  return [
    print("pub"),
    printAssignment(
      [print("id"), printObject(print, node)], //
      " =",
      "value"
    ),
  ];
},
|
||||
// `pub unsafe trait Name<...>: bounds { ... }`
[NodeType.TraitDeclaration](print, node) {
  return [
    print("pub"),
    print.b("unsafe"),
    "trait",
    printGenerics_x_whereBounds(print, node, printDeclarationTypeBounds(print, node, ":")),
    adjustClause(node, printBlockBody(print, node)),
  ];
},
// `pub unsafe auto trait Name { ... }`
[NodeType.AutoTraitDeclaration](print, node) {
  return [
    print("pub"),
    print.b("unsafe"),
    "auto trait ",
    print("id"),
    " ",
    printBlockBody(print, node as any), // see "transform.ts"
  ];
},
// Trait alias: `pub trait Name<...> = bounds;`
[NodeType.TraitAliasDeclaration](print, node) {
  return [
    print("pub"),
    print.b("unsafe"),
    "trait",
    printGenerics_x_whereBounds(print, node, printDeclarationTypeBounds(print, node, " =")),
    ";",
  ];
},
// `pub unsafe impl<...> const Trait for Type { ... }`
[NodeType.ImplDeclaration](print, node) {
  return [
    print("pub"),
    print.b("unsafe"),
    "impl",
    printGenerics_x_whereBounds(print, node, [print.b("const"), printImplTraitForType(print, node)]),
    adjustClause(node, printBlockBody(print, node)),
  ];
},
// Negative impl: `pub impl<...> !Trait for Type { ... }`
[NodeType.NegativeImplDeclaration](print, node) {
  return [
    print("pub"),
    "impl",
    printGenerics_x_whereBounds(print, node, ["!", printImplTraitForType(print, node)]),
    " ",
    printBlockBody(print, node as any), // see "transform.ts"
  ];
},
|
||||
// Qualified path selector: `<Type as Trait>`.
[NodeType.ExpressionTypeSelector](print, node) {
  return group(["<", print("typeTarget"), f` as ${print("typeExpression")}`, ">"]);
},
// Turbofish: `callee::<T, U>`.
[NodeType.ExpressionTypeCast](print, node) {
  return [print("typeCallee"), f`::${printTypeArguments(print, node)}`];
},
// `expr as Type`
[NodeType.ExpressionAsTypeCast](print, node) {
  return [print("expression"), " as ", print("typeExpression")];
},
// `return (expr)?`
[NodeType.ReturnExpression](print, node) {
  return ["return", printFlowControlExpression(print, node)];
},
// `break ('label)? (expr)?`
[NodeType.BreakExpression](print, node) {
  return ["break", f` ${print("label")}`, printFlowControlExpression(print, node)];
},
// `continue ('label)?`
[NodeType.ContinueExpression](print, node) {
  return ["continue", f` ${print("label")}`];
},
// `yield (expr)?`
[NodeType.YieldExpression](print, node) {
  return ["yield", printFlowControlExpression(print, node)];
},
// `lower..upper` / `lower..=upper`
[NodeType.RangeLiteral](print, node) {
  return [print("lower"), "..", print.b("last", "="), print("upper")];
},
[NodeType.CallExpression](print, node) {
  return printCallExpression(print, node);
},
[NodeType.MemberExpression](print, node) {
  return printMemberExpression(print, node);
},
// Postfix `.await`.
[NodeType.AwaitExpression](print, node) {
  return [print("expression"), ".await"];
},
// Postfix `?` operator.
[NodeType.UnwrapExpression](print, node) {
  return [print("expression"), "?"];
},
|
||||
[NodeType.ParenthesizedExpression](print, node) {
  // NOTE(review): `exit.never()` presumably throws — parenthesized nodes
  // appear to be unwrapped before printing, making the code below
  // intentionally unreachable (kept as the reference implementation).
  exit.never();
  const shouldHug = !hasComment(node.expression) && (is_ArrayOrTupleLiteral(node.expression) || is_StructLiteral(node.expression));
  if (shouldHug) return ["(", print("expression"), ")"];
  return group(["(", indent([softline, print("expression")]), softline, ")"]);
},
|
||||
// Unary minus.
[NodeType.MinusExpression](print, node) {
  return printUnaryExpression("-", node);
},
// Logical/bitwise not.
[NodeType.NotExpression](print, node) {
  return printUnaryExpression("!", node);
},
[NodeType.OrExpression](print, node) {
  return printBinaryishExpression(print, node);
},
[NodeType.AndExpression](print, node) {
  return printBinaryishExpression(print, node);
},
// `left = right`
[NodeType.ReassignmentExpression](print, node) {
  return printAssignment(print("left"), " =", "right");
},
// Bare `_` on the left-hand side of destructuring assignments.
[NodeType.UnassignedExpression](print, node) {
  return "_";
},
[NodeType.OperationExpression](print, node) {
  return printBinaryishExpression(print, node);
},
// Compound assignment, e.g. `left += right` (`node.kind` holds the operator).
[NodeType.ReassignmentOperationExpression](print, node) {
  return printAssignment(print("left"), " " + node.kind, "right");
},
[NodeType.ComparisonExpression](print, node) {
  return printBinaryishExpression(print, node);
},
// `let pattern = expr` inside `if`/`while` conditions.
[NodeType.LetScrutinee](print, node) {
  return ["let ", printAssignment(print("pattern"), " =", "expression")];
},
|
||||
// Closure `|args| body`.
[NodeType.ClosureFunctionExpression](print, node) {
  return printArrowFunction(print, node);
},
[NodeType.ClosureFunctionParameterDeclaration](print, node) {
  return group(printAnnotatedPattern(print, node));
},
// `('label:)? const async move unsafe { ... }`
[NodeType.BlockExpression](print, node) {
  return [
    f`${print("label")}: `,
    print.b("const"),
    print.b("async"),
    print.b("move"),
    print.b("unsafe"),
    printBlockBody(print, node),
  ];
},
// `('label:)? loop { ... }`
[NodeType.LoopBlockExpression](print, node) {
  return [f`${print("label")}: `, "loop ", printBlockBody(print, node)];
},
// `('label:)? while cond { ... }`
[NodeType.WhileBlockExpression](print, node) {
  return [f`${print("label")}: `, "while ", printCondition(print, node), printBlockBody(print, node)];
},
// `('label:)? for pat in expr { ... }`
[NodeType.ForInBlockExpression](print, node) {
  return [f`${print("label")}: `, "for ", print("pattern"), " in ", print("expression"), " ", printBlockBody(print, node)];
},
[NodeType.IfBlockExpression](print, node) {
  return [f`${print("label")}: `, printIfBlock(print, node)];
},
// `('label:)? try { ... }`
[NodeType.TryBlockExpression](print, node) {
  return [f`${print("label")}: `, "try ", printBlockBody(print, node)];
},
|
||||
[NodeType.MatchExpression](print, node) {
  // `match expr { ... }` — the scrutinee gets its own group so a long
  // scrutinee can break independently of the case body.
  const id = Symbol("match");
  const expr = print("expression");
  const needs_parens = pathCall(node, "expression", needsParens);

  let printed: Doc = [
    f`${print("label")}: `,
    "match ",
    // A parenthesized scrutinee never soft-breaks on its own.
    needs_parens ? expr : group([indent([softline, expr]), softline], { id }),
    // Space before `{` only when the scrutinee group stayed flat.
    needs_parens ? " " : !willBreak(expr) ? ifBreak("", " ", { groupId: id }) : "" /* ifBreak("", " ", { groupId: id }) */,
    printBlockBody(print, node),
  ];

  // A match that is the direct body of a closure is wrapped in parens
  // when it breaks, keeping the closure body unambiguous.
  const parent = getParentNode()!;
  if (is_ClosureFunctionExpression(parent) && parent.expression === node) {
    printed = parenthesize_if_break([indent([softline, printed]), softline]);
  }
  return printed;
},
|
||||
[NodeType.MatchExpressionCase](print, node) {
  // Print a match arm: `pattern (if guard)? => expression`.
  // A block-like arm body (plain block or if-block without dangling
  // attribute comments) hugs `=>` on the same line; any other body gets
  // its own indented group so it can break after the arrow.
  return group([
    group(print("pattern")),
    " ",
    printIfBlockCondition(print, node),
    "=>", //
    (is_BlockExpression(node.expression) || is_IfBlockExpression(node.expression)) &&
    !hasComment(node.expression, 0, (comment) => getOptions().danglingAttributes.includes(comment as any))
      ? [" ", print("expression")]
      : group(indent([line, print("expression")])),
  ]);
  // NOTE(review): two unreachable `return` statements that followed the
  // return above (leftover experiments) were removed; behavior unchanged.
},
|
||||
// `Struct { ... }` literal.
[NodeType.StructLiteral](print, node) {
  return [print("struct"), printObject(print, node)];
},
// Field-init shorthand `{ value }`.
[NodeType.StructLiteralPropertyShorthand](print, node) {
  return print("value");
},
// `key: value`
[NodeType.StructLiteralProperty](print, node) {
  return [print("key"), ": ", print("value")];
},
// Functional-update spread `..expr`.
[NodeType.StructLiteralPropertySpread](print, node) {
  return ["..", print("expression")];
},
[NodeType.StructLiteralRestUnassigned](print, node) {
  return "..";
},
[NodeType.ArrayLiteral](print, node) {
  return printArrayLike(print, node);
},
// Repeat-array literal `[init; size]`.
[NodeType.SizedArrayLiteral](print, node) {
  return sg_duo`[${print("initExpression")};${print("sizeExpression")}]`;
},
[NodeType.TupleLiteral](print, node) {
  return printArrayLike(print, node);
},
|
||||
// `&mut expr`
[NodeType.ReferenceExpression](print, node) {
  return printUnaryExpression(["&", print.b("mut")], node);
},
// `&raw const expr` / `&raw mut expr` — `node.kind` carries const|mut.
[NodeType.RawReferenceExpression](print, node) {
  return printUnaryExpression(`&raw ${node.kind} `, node);
},
// `*expr`
[NodeType.DereferenceExpression](print, node) {
  return printUnaryExpression("*", node);
},
// `box expr`
[NodeType.BoxExpression](print, node) {
  return printUnaryExpression("box ", node);
},
|
||||
// `a | b | c` pattern.
[NodeType.UnionPattern](print, node) {
  return printUnionPattern(print, node);
},
[NodeType.ParenthesizedPattern](print, node) {
  // NOTE(review): `exit.never()` presumably throws — parenthesized
  // patterns appear to be unwrapped before printing; unreachable below.
  exit.never();
  return sg_single`(${print("pattern")})`;
},
[NodeType.RestPattern](print, node) {
  return "..";
},
[NodeType.WildcardPattern](print, node) {
  return "_";
},
// `ref mut name @ pattern`
[NodeType.PatternVariableDeclaration](print, node) {
  return [print.b("ref"), print.b("mut"), printAssignment(print("id"), " @", "pattern")];
},
// `Struct { ... }` pattern.
[NodeType.StructPattern](print, node) {
  return [print("struct"), printObject(print, node)];
},
// `key: pattern`
[NodeType.StructPatternPropertyDestructured](print, node) {
  return [print("key"), ": ", print("pattern")];
},
// Shorthand field binding: `box ref mut id`.
[NodeType.StructPatternPropertyShorthand](print, node) {
  return [print.b("box"), print.b("ref"), print.b("mut"), print("id")];
},
// `Struct(...)` tuple pattern.
[NodeType.TuplePattern](print, node) {
  return [print("struct"), printArrayLike(print, node)];
},
[NodeType.ArrayPattern](print, node) {
  return printArrayLike(print, node);
},
// `&mut pattern`
[NodeType.ReferencePattern](print, node) {
  return ["&", print.b("mut"), print("pattern")];
},
// `box pattern`
[NodeType.BoxPattern](print, node) {
  return ["box ", print("pattern")];
},
// Negative literal pattern.
[NodeType.MinusPattern](print, node) {
  return ["-", print("pattern")];
},
// `lower..upper` / `lower..=upper`
[NodeType.RangePattern](print, node) {
  return [print("lower"), "..", print.b("last", "="), print("upper")];
},
|
||||
// Generic type application: `Name<T, U>`.
[NodeType.TypeCall](print, node) {
  return [print("typeCallee"), printTypeArguments(print, node)];
},
// Associated-type binding argument: `Target = Type`.
[NodeType.TypeCallNamedArgument](print, node) {
  return printAssignment(print("target"), " =", "typeExpression");
},
// Associated-type bound argument: `Target: bounds`.
[NodeType.TypeCallNamedBound](print, node) {
  return [print("typeTarget"), printTypeBounds(":", print, node)];
},
// Elided lifetime `'_`.
[NodeType.LtElided](print, node) {
  return "'_";
},
[NodeType.LtStatic](print, node) {
  return "'static";
},
// Never type `!`.
[NodeType.TypeNever](print, node) {
  return "!";
},
// Inferred type `_`.
[NodeType.TypeInferred](print, node) {
  return "_";
},
|
||||
// `T: bounds = Default`
[NodeType.GenericTypeParameterDeclaration](print, node) {
  return printAssignment(
    [print("id"), printTypeBounds(":", print, node)], //
    " =",
    "typeDefault"
  );
},
// `const N: Type = Default`
[NodeType.ConstTypeParameterDeclaration](print, node) {
  return [
    "const ",
    printAssignment(
      [print("id"), printTypeAnnotation(print, node)], //
      " =",
      "typeDefault"
    ),
  ];
},
// `'a: 'b + 'c`
[NodeType.GenericLtParameterDeclaration](print, node) {
  return [print("id"), printLtBounds(":", print, node)];
},
// `for<'a> Type: bounds` entry in a where clause.
[NodeType.WhereTypeBoundDeclaration](print, node) {
  return [printLtParameters(print, node), print("typeTarget"), printTypeBounds(":", print, node)];
},
// `'a: 'b` entry in a where clause.
[NodeType.WhereLtBoundDeclaration](print, node) {
  return [print("ltTarget"), printLtBounds(":", print, node)];
},
|
||||
// A single trait bound: `~const ?for<'a> Trait`.
[NodeType.TypeTraitBound](print, node) {
  return [print.b("maybeConst", "~const "), print.b("optional", "?"), printLtParameters(print, node), print("typeExpression")];
},
// `dyn Trait + ...`
[NodeType.TypeDynBounds](print, node) {
  return printTypeBounds("dyn", print, node);
},
// `impl Trait + ...`
[NodeType.TypeImplBounds](print, node) {
  return printTypeBounds("impl", print, node);
},
// `for<'a> unsafe extern "abi" fn(...) -> T` pointer type.
[NodeType.TypeFnPointer](print, node) {
  return [printLtParameters(print, node), print.b("unsafe"), print("extern"), "fn", printParametersAndReturnType(node)];
},
// Fn-pointer parameter with optional name: `(id:)? Type`.
[NodeType.TypeFnPointerParameter](print, node) {
  return [f`${print("id")}: `, print("typeAnnotation")];
},
// `Fn(...) -> T` style trait sugar.
[NodeType.TypeFunction](print, node) {
  return [print("callee"), printParametersAndReturnType(node)];
},
// `(T, U)` tuple type.
[NodeType.TypeTuple](print, node) {
  return printArrayLike(print, node);
},
|
||||
// Sized array type `[T; N]`.
[NodeType.TypeSizedArray](print, node) {
  return sg_duo`[${print("typeExpression")};${print("sizeExpression")}]`;
  // NOTE(review): an unreachable `isSimpleType` shortcut that followed the
  // return above (dead code) was removed. If it was meant to run FIRST —
  // mirroring the TypeSlice printer — that would be a separate behavioral
  // change to make deliberately.
},
|
||||
// Slice type `[T]`.
[NodeType.TypeSlice](print, node) {
  // Simple element types stay inline; others may soft-break via sg_single.
  if (isSimpleType(node)) return ["[", print("typeExpression"), "]"];
  return sg_single`[${print("typeExpression")}]`;
},
// `&'a mut Type`
[NodeType.TypeReference](print, node) {
  return ["&", f`${print("lt")} `, print.b("mut"), print("typeExpression")];
},
// `*const Type`
[NodeType.TypeDereferenceConst](print, node) {
  return ["*const ", print("typeExpression")];
},
// `*mut Type`
[NodeType.TypeDereferenceMut](print, node) {
  return ["*mut ", print("typeExpression")];
},
[NodeType.TypeParenthesized](print, node) {
  // NOTE(review): `exit.never()` presumably throws — parenthesized types
  // appear to be unwrapped before printing; the return below is unreachable.
  exit.never();
  return sg_single`(${print("typeExpression")})`;
},
|
||||
};
|
||||
@@ -1,645 +0,0 @@
|
||||
import {
|
||||
ClosureFunctionExpression,
|
||||
ComparisonExpression,
|
||||
ConditionExpression,
|
||||
EnumDeclaration,
|
||||
EnumMemberStructDeclaration,
|
||||
ExpressionAsTypeCast,
|
||||
ExpressionNode,
|
||||
ExpressionStatement,
|
||||
ExpressionWithBody,
|
||||
LeftRightExpression,
|
||||
LogicalExpression,
|
||||
MacroDeclaration,
|
||||
MacroRulesDeclaration,
|
||||
MissingNode,
|
||||
Node,
|
||||
NodeType,
|
||||
NodeWithBody,
|
||||
NodeWithBodyOrCases,
|
||||
OperationExpression,
|
||||
PRCD,
|
||||
StructDeclaration,
|
||||
StructLiteral,
|
||||
StructPattern,
|
||||
TK,
|
||||
UnionDeclaration,
|
||||
} from "jinx-rust";
|
||||
import {
|
||||
can_have_OuterAttributes,
|
||||
getAstPath,
|
||||
getPrecedence,
|
||||
hasAttributes,
|
||||
hasBody,
|
||||
hasCondition,
|
||||
hasItems,
|
||||
hasLetScrutineeCondition,
|
||||
hasOuterAttributes,
|
||||
hasProperties,
|
||||
is_Attribute,
|
||||
is_AttributeOrDocComment,
|
||||
is_AwaitExpression,
|
||||
is_BitwiseOperator,
|
||||
is_CallExpression,
|
||||
is_ClosureFunctionExpression,
|
||||
is_ComparisonExpression,
|
||||
is_DocCommentAttribute,
|
||||
is_ElseBlock,
|
||||
is_EnumMemberDeclaration,
|
||||
is_EqualityOperator,
|
||||
is_ExpressionAsTypeCast,
|
||||
is_ExpressionStatement,
|
||||
is_ExpressionWithBody,
|
||||
is_ExpressionWithBodyOrCases,
|
||||
is_ExpressionWithBodyOrCases_or_BlockLikeMacroInvocation,
|
||||
is_FlowControlExpression,
|
||||
is_FlowControlMaybeValueExpression,
|
||||
is_ForInBlockExpression,
|
||||
is_Identifier,
|
||||
is_IfBlockExpression,
|
||||
is_ImplicitReturnAbleNode,
|
||||
is_LeftRightExpression,
|
||||
is_LetScrutinee,
|
||||
is_Literal,
|
||||
is_LiteralNumberLike,
|
||||
is_LogicalExpression,
|
||||
is_LoopBlockExpression,
|
||||
is_MatchExpression,
|
||||
is_MatchExpressionCase,
|
||||
is_MemberExpression,
|
||||
is_NodeWithBodyNoBody,
|
||||
is_NodeWithMaybePatternNoUnionBody,
|
||||
is_OperationExpression,
|
||||
is_ParenthesizedNode,
|
||||
is_PatternVariableDeclaration,
|
||||
is_PostfixExpression,
|
||||
is_RangeLiteral,
|
||||
is_ReassignmentNode,
|
||||
is_ReturnExpression,
|
||||
is_StatementNode,
|
||||
is_StructLiteral,
|
||||
is_StructLiteralProperty,
|
||||
is_StructPatternProperty,
|
||||
is_StructPropertyDeclaration,
|
||||
is_TypeBoundsStandaloneNode,
|
||||
is_TypeFunctionNode,
|
||||
is_TypeTraitBound,
|
||||
is_UnaryExpression,
|
||||
is_UnaryType,
|
||||
is_UnionPattern,
|
||||
is_WhileBlockExpression,
|
||||
is_YieldExpression,
|
||||
is_bitshiftOperator,
|
||||
is_multiplicativeOperator,
|
||||
} from "jinx-rust/utils";
|
||||
import { BlockLikeMacroInvocation, is_BlockLikeMacroInvocation, is_CallExpression_or_CallLikeMacroInvocation } from "../transform";
|
||||
import { exit, last_of } from "../utils/common";
|
||||
import { CF, hasBreaklineAfter, hasComment } from "./comments";
|
||||
import { flowControlExpressionNeedsOuterParens } from "./core";
|
||||
import { Doc, hardline, softline, willBreak } from "./external";
|
||||
import {
|
||||
assertPathAtNode,
|
||||
getContext,
|
||||
getGrandParentNode,
|
||||
getNode,
|
||||
getOptions,
|
||||
getParentNode,
|
||||
getPrintFn,
|
||||
is_printing_macro,
|
||||
pathCallAtParent,
|
||||
pathCallParentOf,
|
||||
stackIncludes,
|
||||
} from "./plugin";
|
||||
|
||||
/**
 * True when `node` must be wrapped in the "outer" parenthesization style
 * (parens with soft-break indentation added by the caller), as opposed to
 * the tight inner parens decided by `needsInnerParens`.
 */
export function needsOuterSoftbreakParens(node: Node) {
  const parent = getParentNode(node);

  if (!parent) return false;

  // `expr as Type` follows ordinary precedence rules.
  if (is_ExpressionAsTypeCast(node)) {
    return precedenceNeedsParens(node, parent);
  }

  // `return (expr)` / `break (expr)` / ... when the flow-control parent
  // itself requires its value to be parenthesized.
  if (
    is_FlowControlMaybeValueExpression(parent) && //
    parent.expression === node &&
    flowControlExpressionNeedsOuterParens(parent)
  ) {
    return true;
  }
  // Block-like expressions (incl. block-like macro invocations) need parens
  // when used as a member-access target, or nested inside another
  // block-like position that is not an `else` arm.
  if (
    is_ExpressionWithBodyOrCases_or_BlockLikeMacroInvocation(node) &&
    (false ||
      (is_MemberExpression(parent) && parent.expression === node) ||
      (is_ExpressionWithBodyOrCases_or_BlockLikeMacroInvocation(parent) && !is_ElseBlock(node, parent)))
  ) {
    return true;
  }

  // `a | b` pattern inside a position that cannot hold a bare union.
  if (is_UnionPattern(node) && is_NodeWithMaybePatternNoUnionBody(parent)) {
    return true;
  }

  // Comments can force parens so the comment attaches unambiguously.
  if (hasComment(node)) {
    if (is_UnaryExpression(parent)) {
      return true;
    }

    // A trailing line comment would otherwise swallow the rest of the
    // return/yield expression.
    if (hasComment(node, CF.Line)) {
      if (is_ReturnExpression(parent) || (is_YieldExpression(parent) && parent.expression === node)) {
        return true;
      }
    }

    // Outer attributes that are not legal in this position stay attached
    // by parenthesizing the node.
    if (
      hasComment(node, CF.Leading, (comment) => is_Attribute(comment) && !comment.inner) &&
      !can_have_OuterAttributes(node, parent, true)
    ) {
      return true;
    }
  }

  return false;
}
|
||||
|
||||
/**
 * True when `node` must be printed wrapped in tight parentheses.
 * Mutually exclusive with `needsOuterSoftbreakParens` (checked first).
 * The checks are order-sensitive: earlier node-kind branches return
 * before later, more general ones are reached.
 */
export function needsInnerParens(node: Node) {
  if (needsOuterSoftbreakParens(node)) {
    return false;
  }

  const parent = getParentNode(node);

  if (!parent) {
    return false;
  }

  if (is_Identifier(node)) {
    return false;
  }

  // `1.foo()` is ambiguous — number-like literals need parens when they
  // are the target of a member access.
  if (is_Literal(node)) {
    return is_LiteralNumberLike(node) && is_MemberExpression(parent) && node === parent.expression;
  }

  // `(a.b)(args)` unless the member access is a known method call.
  if (is_CallExpression(parent) && parent.callee === node && is_MemberExpression(node)) {
    return !getOptions().actuallyMethodNodes.has(node);
  }

  // Assignments used as sub-expressions.
  if (is_ReassignmentNode(node)) {
    if (is_printing_macro()) {
      return false;
    }

    if (is_ClosureFunctionExpression(parent) && node === parent.expression) {
      return true;
    }

    // Statement-level assignment with a struct literal on the left needs
    // parens so it does not parse as a block.
    if (is_ExpressionStatement(parent)) {
      return is_StructLiteral(node.left);
    }

    if (is_ReassignmentNode(parent)) {
      return false;
    }

    return true;
  }

  if (is_ParenthesizedNode(parent)) {
    return false;
  }

  if (is_ExpressionStatement(parent)) {
    return false;
  }

  // Range literals bind loosely: parenthesize under most operators.
  if (is_RangeLiteral(node)) {
    return (
      is_ExpressionAsTypeCast(parent) ||
      is_LogicalExpression(parent) ||
      is_UnaryExpression(parent) ||
      is_PostfixExpression(parent) ||
      (is_MemberExpression(parent) && node === parent.expression) ||
      (is_CallExpression(parent) && node === parent.callee) ||
      is_OperationExpression(parent) ||
      is_ComparisonExpression(parent)
    );
  }

  // `if let pat = (a && b)` — a logical scrutinee would read as a chain.
  if (is_LetScrutinee(parent) && is_LogicalExpression(node) && parent.expression === (node as any)) {
    return true;
  }

  if (is_UnaryExpression(node)) {
    switch (parent.nodeType) {
      case NodeType.MemberExpression:
      case NodeType.AwaitExpression:
        return node === parent.expression;
      case NodeType.CallExpression:
        return node === parent.callee;
      default:
        return false;
    }
  }

  // Block-like expressions used in plain expression positions.
  if (is_ExpressionWithBodyOrCases_or_BlockLikeMacroInvocation(node)) {
    if (is_ExpressionWithBodyOrCases(parent)) {
      return !is_ElseBlock(node, parent);
    }
    if (is_LetScrutinee(parent) && parent.expression === node && is_ExpressionWithBodyOrCases(getGrandParentNode())) {
      return true;
    }
    return (
      is_ExpressionAsTypeCast(parent) ||
      is_LogicalExpression(parent) ||
      is_UnaryExpression(parent) ||
      is_PostfixExpression(parent) ||
      (is_MemberExpression(parent) && node === parent.expression) ||
      (is_CallExpression(parent) && node === parent.callee) ||
      is_OperationExpression(parent) ||
      is_ComparisonExpression(parent) ||
      is_RangeLiteral(parent)
    );
  }

  // Struct literals are ambiguous with blocks in condition-like positions.
  if (is_StructLiteral(node)) {
    if (is_ExpressionWithBodyOrCases(parent)) {
      return true;
    }

    if (is_LetScrutinee(parent) && parent.expression === node && is_ExpressionWithBodyOrCases(getGrandParentNode())) {
      return true;
    }
    if (is_UnaryExpression(parent) || is_PostfixExpression(parent) || is_MemberExpression(parent)) {
      return parent.expression === node;
    }
    if (is_CallExpression(parent)) {
      return parent.callee === node;
    }
  }

  // Binary-ish nodes and closures fall back to precedence comparison.
  if (is_LogicalExpression(node) || is_OperationExpression(node) || is_ComparisonExpression(node) || is_ClosureFunctionExpression(node)) {
    return precedenceNeedsParens(node, parent);
  }

  // A `fn(..) -> T` bound followed by further `+ ...` bounds needs parens
  // around the fn type so the return type isn't absorbed by the bounds.
  if (is_TypeFunctionNode(node)) {
    const gp = getGrandParentNode();
    if (node.returnType && is_TypeTraitBound(parent) && is_TypeBoundsStandaloneNode(gp) && last_of(gp.typeBounds) !== parent) {
      return true;
    }
  }

  // `dyn A + B` nested inside another bounds / unary-type position.
  if (is_TypeBoundsStandaloneNode(node)) {
    return (
      (is_UnaryType(parent) && node.typeBounds.length > 1) ||
      is_TypeBoundsStandaloneNode(parent) ||
      is_TypeTraitBound(parent) ||
      (is_TypeFunctionNode(parent) && parent.returnType === node)
    );
  }

  // `name @ (a | b)`
  if (is_PatternVariableDeclaration(parent)) {
    return is_UnionPattern(node);
  }

  return false;
}
|
||||
|
||||
/**
 * Precedence-based parenthesization for binary-ish nodes
 * (logical / operation / comparison), closures and `as` casts.
 */
function precedenceNeedsParens(node: LeftRightExpression | ClosureFunctionExpression | ExpressionAsTypeCast, parent: Node) {
  if (is_UnaryExpression(parent) || is_PostfixExpression(parent)) return true;
  if (is_ReassignmentNode(parent)) return parent.left === node;
  if (is_MemberExpression(parent)) return parent.expression === node;
  if (is_CallExpression(parent)) return parent.callee === node;
  // `a as T as U` chains need no parens; anything else under `as` does.
  if (is_ExpressionAsTypeCast(parent)) return !is_ExpressionAsTypeCast(node);
  // Mixed `&&`/`||` nesting always parenthesizes; same operator defers
  // to the precedence evaluation below.
  if (is_LogicalExpression(parent)) return is_LogicalExpression(node) ? parent.nodeType !== node.nodeType : evalPrecedence(node, parent);
  if (is_OperationExpression(parent) || is_ComparisonExpression(parent)) return evalPrecedence(node, parent);
  return false;
  function evalPrecedence(
    child: LeftRightExpression | ClosureFunctionExpression | ExpressionAsTypeCast,
    parent: ComparisonExpression | OperationExpression | LogicalExpression
  ) {
    // Casts and closures always need parens under a binary operator.
    if (is_ExpressionAsTypeCast(child) || is_ClosureFunctionExpression(child)) {
      return true;
    }
    function getPrec(node, bool) {
      // if (is_EqualityOperator(node.tk)) {
      // 	return 11.3;
      // }
      // if (is_LargerLesserOperator(node.tk)) {
      // 	return 11.6;
      // }
      return getPrecedence(node, bool);
    }

    const childPRCD = getPrec(child, is_insideScrutinee(child));
    const parentPRCD = getPrec(parent, is_insideScrutinee(parent));

    if (parentPRCD > childPRCD) {
      return true;
    }

    // Left-associative: the right operand at equal precedence needs parens.
    if (parentPRCD === childPRCD && parent.right === child) {
      return true;
    }

    if (parentPRCD === childPRCD && !shouldFlatten(parent, child)) {
      return true;
    }

    // Keep `%` visually grouped when it appears under +/-.
    if (parentPRCD < childPRCD && child.tk === TK["%"]) {
      return parentPRCD === PRCD["+-"];
    }

    // Bitwise operators are always parenthesized relative to comparisons.
    if (is_BitwiseOperator(parent.tk) || (is_BitwiseOperator(child.tk) && is_EqualityOperator(parent.tk))) {
      return true;
    }

    return false;
  }
}
|
||||
|
||||
export function shouldFlatten(parent: ExpressionNode | ConditionExpression, node: ExpressionNode | ConditionExpression) {
|
||||
if (getPrecedence(node, is_insideScrutinee(node)) !== getPrecedence(parent, is_insideScrutinee(parent))) return false;
|
||||
if (is_ComparisonExpression(parent) && is_ComparisonExpression(node)) return false;
|
||||
if (is_OperationExpression(parent) && is_OperationExpression(node)) {
|
||||
if (
|
||||
(node.tk === TK["%"] && is_multiplicativeOperator(parent.tk)) ||
|
||||
(parent.tk === TK["%"] && is_multiplicativeOperator(node.tk)) ||
|
||||
(node.tk !== parent.tk && is_multiplicativeOperator(node.tk) && is_multiplicativeOperator(parent.tk)) ||
|
||||
(is_bitshiftOperator(node.tk) && is_bitshiftOperator(parent.tk))
|
||||
)
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
export function needsParens(node: Node) {
|
||||
return needsOuterSoftbreakParens(node) || needsInnerParens(node);
|
||||
}
|
||||
|
||||
/**
 * Whether an ExpressionStatement should be printed with a trailing `;`,
 * evaluated from its parent's position in the AST path.
 * @param disregardExprType when true, ignore the expression's own type
 *   (used by the auto-complete walk inside `needsSemi`).
 */
export function stmtNeedsSemi(stmt: ExpressionStatement, disregardExprType = false) {
  return pathCallParentOf(stmt, (parent) => needsSemi(parent as any, stmt, disregardExprType));
}
|
||||
|
||||
// Sentinel standing in for the statement's expression when the caller
// asked to disregard the expression's type.
const NoNode = { nodeType: 0 } as MissingNode;

/**
 * Core semicolon decision for an ExpressionStatement inside `parent`'s body.
 * Precedence of the rules: force-preserve (compiler bug workaround) >
 * never (block-like exprs) > preserve-in-tail-position > always.
 */
export function needsSemi(parent: NodeWithBody, stmt: ExpressionStatement, disregardExprType = false) {
  const expr = disregardExprType ? NoNode : stmt.expression!;
  const hadSemi = !disregardExprType && stmt.semi;

  return (
    !!expr &&
    (forcePreserveSemi()
      ? true
      : shouldNeverSemi()
      ? false
      : shouldPreserveSemi()
      ? hadSemi || shouldAlwaysSemi() || canAutoCompleteSemi()
      : true)
  );

  // Keep an author-written semicolon on a tail `if let`/`match` whose
  // removal would change meaning due to a rustc quirk.
  function forcePreserveSemi() {
    /** Rust Compiler bug (preserve optional semicolon) */
    // rust-lang/rust#70844 https://github.com/rust-lang/rust/issues/70844
    // issue#22 https://github.com/jinxdash/prettier-plugin-rust/issues/22
    return (
      hadSemi &&
      stmt === last_of(parent.body!) &&
      ((is_IfBlockExpression(expr) &&
        hasLetScrutineeCondition(expr) &&
        !(is_LetScrutinee(expr.condition) && is_Identifier(expr.condition.expression))) ||
        (is_MatchExpression(expr) && !is_Identifier(expr.expression)))
    );
  }
  // Block-like expressions never take a semicolon.
  function shouldNeverSemi() {
    return is_ExpressionWithBodyOrCases_or_BlockLikeMacroInvocation(expr);
  }
  // The tail statement of an implicit-return position keeps whatever the
  // author wrote (subject to the always/auto-complete rules).
  function shouldPreserveSemi() {
    return stmt === last_of(parent.body!) && (is_ImplicitReturnAbleNode(parent) || is_BlockLikeMacroInvocation(parent));
  }
  // Flow-control and assignment expressions always terminate with `;`.
  function shouldAlwaysSemi() {
    return is_FlowControlExpression(expr) || is_ReassignmentNode(expr);
  }
  // Walk outward through else-chains, wrapping statements and match arms:
  // a semicolon may be auto-completed when every enclosing tail position
  // would itself take one.
  function canAutoCompleteSemi() {
    return withPathAt(parent, function checkParent(child: NodeWithBodyOrCases): boolean {
      return pathCallParentOf(child, (parent) => {
        if (is_IfBlockExpression(parent) && parent.else === child) {
          // if ... { ... } else if { ... } ...
          // ^ ------------------------------- parent
          //                 ^ ----------- child
          return checkParent(parent);
        }
        if (is_ExpressionStatement(parent)) {
          // { .... { ... } ... }
          // ^ -----------------^ grandparent
          //        ^ --- ^ ExpressionStatement<child>
          if (hasOuterAttributes(parent)) return false;
          return stmtNeedsSemi(parent, true);
        }
        if (is_MatchExpressionCase(parent) && parent.expression === child) {
          return pathCallParentOf(parent, checkParent);
        }
        return false;
      });
    });
  }
}
|
||||
|
||||
/**
 * Whether a block body can be printed on a single line.
 * Only expression-with-body nodes qualify; the body must be empty, or hold a
 * single attribute/doc-comment, or hold a single semicolon-free expression
 * statement that is itself not block-like.
 */
export function canInlineBlockBody(node: NodeWithBodyOrCases | BlockLikeMacroInvocation): boolean {
  if (!is_ExpressionWithBody(node)) {
    return false;
  }
  const body = node.body;

  if (body.length === 0) {
    return canInlineInlineable(node);
  }

  if (body.length === 1) {
    const stmt = body[0];
    if (is_AttributeOrDocComment(stmt)) {
      return true;
    }
    if (is_ExpressionStatement(stmt) && !needsSemi(node, stmt)) {
      /**
       * parent ( ExpressionStatement | StructLiteralProperty | LetVariableDeclaration | ... )
       * 	...
       * 	node {
       * 		expr
       * 	}
       * 	...
       *
       *
       * Q: Can you inline "node { expr }" ?
       */
      const expr = stmt.expression!;

      // Flow control, closures, and nested block-like expressions force a break.
      if (
        is_FlowControlExpression(expr) || //
        is_ClosureFunctionExpression(expr) ||
        is_ExpressionWithBodyOrCases_or_BlockLikeMacroInvocation(expr)
      ) {
        return false;
      }

      return canInlineInlineable(node);
    }
  }
  return false;
}
|
||||
|
||||
// function q(node: ExpressionWithBody) {
|
||||
// pathCallTopMostIfBlockExpression(node, (node) => {});
|
||||
// }
|
||||
|
||||
function canInlineInlineable(node: ExpressionWithBody) {
|
||||
if (is_ForInBlockExpression(node) || is_LoopBlockExpression(node)) {
|
||||
return false;
|
||||
}
|
||||
if (is_WhileBlockExpression(node)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const parent = getParentNode(node)!;
|
||||
|
||||
if (
|
||||
is_ExpressionStatement(parent) &&
|
||||
(!is_ImplicitReturnAbleNode(node) || pathCallAtParent(parent, (parent) => stmtNeedsSemi(parent, true)))
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (is_ElseBlock(node, parent)) {
|
||||
return pathCallAtParent(parent, canInlineBlockBody);
|
||||
}
|
||||
// if (is_CaseBlock(node, parent)) {
|
||||
// return false;
|
||||
// }
|
||||
if (is_IfBlockExpression(node)) {
|
||||
if (
|
||||
!node.else ||
|
||||
// hasLetScrutineeCondition(node) ||
|
||||
is_ExpressionWithBodyOrCases_or_BlockLikeMacroInvocation(node.condition) ||
|
||||
willBreak(getPrintFn(node)("condition"))
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const grandparent = getGrandParentNode();
|
||||
if (is_ExpressionStatement(parent) && hasBody(grandparent) && grandparent.body.length > 1) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
return (
|
||||
is_CallExpression_or_CallLikeMacroInvocation(parent) ||
|
||||
hasItems(parent) ||
|
||||
hasProperties(parent) ||
|
||||
is_ClosureFunctionExpression(parent) ||
|
||||
is_MemberExpression(parent) ||
|
||||
is_AwaitExpression(parent) ||
|
||||
is_LeftRightExpression(parent)
|
||||
);
|
||||
}
|
||||
|
||||
// Every node kind whose printed form is a bracketed region ({...}, (...), [...])
// that may be empty — the argument type accepted by `emptyContent`.
type NodeWithBracketContent =
	| NodeWithBodyOrCases
	| BlockLikeMacroInvocation
	| EnumDeclaration
	| StructDeclaration
	| StructLiteral
	| StructPattern
	| EnumMemberStructDeclaration
	| UnionDeclaration
	| MacroRulesDeclaration
	| MacroDeclaration;
|
||||
|
||||
/**
 * Doc to print between a node's brackets when its content is empty:
 * nothing (`""`), a `softline` (may collapse), or a `hardline` (always breaks).
 */
export function emptyContent(node: NodeWithBracketContent): Doc {
  switch (node.nodeType) {
    // Declaration-like nodes render empty brackets with no inner whitespace.
    case NodeType.Program:
    case NodeType.MacroRulesDeclaration:
    case NodeType.MacroDeclaration:
    case NodeType.ExternBlockDeclaration:
    case NodeType.ModuleDeclaration:
    case NodeType.TraitDeclaration:
    case NodeType.StructDeclaration:
    case NodeType.MacroInvocation:
    case NodeType.FunctionDeclaration:
    case NodeType.ImplDeclaration:
    case NodeType.UnionDeclaration:
    case NodeType.EnumDeclaration:
    case NodeType.EnumMemberStructDeclaration:
    case NodeType.StructLiteral:
    case NodeType.StructPattern:
      // case NodeType.MatchExpression:
      return "";
    // Block expressions collapse only when inlineable; `if`/`else` blocks keep a softline.
    case NodeType.BlockExpression:
    case NodeType.WhileBlockExpression:
    case NodeType.ForInBlockExpression:
    case NodeType.TryBlockExpression:
    case NodeType.IfBlockExpression:
      return canInlineInlineable(node)
        ? is_IfBlockExpression(node) || is_ElseBlock(node, getParentNode()!)
          ? softline
          : ""
        : hardline;
    // `loop {}` and `match {}` always break.
    case NodeType.LoopBlockExpression:
    case NodeType.MatchExpression:
      return hardline;
    default:
      if (is_NodeWithBodyNoBody(node)) {
        return "";
      }
      __DEV__: exit.never(node);
      return "";
  }
}
|
||||
|
||||
/**
 * Whether `target` sits inside a `let`-scrutinee condition
 * (e.g. the chained `&&`/`||` operands of `if let ... && ...`).
 * Walks up through Or/And expressions until it reaches the owning condition.
 */
export function is_insideScrutinee(target: Node) {
  return withPathAt(target, (n) => stackIncludes("condition") && r(n));
  function r(CHILD: Node) {
    switch (CHILD.nodeType) {
      case NodeType.OrExpression:
      case NodeType.AndExpression:
        return pathCallParentOf(CHILD, (PARENT) =>
          hasCondition(PARENT) && PARENT.condition === CHILD //
            ? hasLetScrutineeCondition(PARENT)
            : r(PARENT)
        );
      case NodeType.LetScrutinee:
        return true;
      default:
        return false;
    }
  }
}
|
||||
|
||||
/**
 * Runs `callback` with the printer path repositioned at `target`.
 * Fast paths: already at target, target is the parent, or target is an
 * ancestor on the current stack; otherwise resolves an explicit AST path.
 */
function withPathAt<T extends Node, R>(target: T, callback: (target: T) => R): R {
  if (target === getNode()) return callback(target);
  if (target === getParentNode()) return pathCallAtParent(target, () => callback(target));
  if (stackIncludes(target)) return pathCallAtParent(getParentNode()!, () => withPathAt(target, callback));
  return getContext().path.call(() => {
    __DEV__: assertPathAtNode("withPathAt", target);
    return callback(target); // @ts-ignore
  }, ...getAstPath(getNode(), target));
}
|
||||
/**
 * Whether a node's outer attributes (`#[...]`, doc comments) must be printed
 * on their own line(s) above the node, rather than inline before it.
 */
export function shouldPrintOuterAttributesAbove(node: Node) {
  return (
    is_StatementNode(node) ||
    is_MatchExpressionCase(node) ||
    (hasAttributes(node) &&
      node.attributes.some(
        // For nodes whose attributes may stay inline, only doc comments or
        // attributes followed by a blank line force the above-position.
        canInlineOuterAttribute(node)
          ? (attr) => is_DocCommentAttribute(attr) || hasBreaklineAfter(attr) //
          : is_DocCommentAttribute
      ))
  );
  // Node kinds whose attributes are allowed to sit inline (enum members, struct fields, ...).
  function canInlineOuterAttribute(node: Node) {
    return (
      is_EnumMemberDeclaration(node) ||
      is_StructPropertyDeclaration(node) ||
      is_StructLiteralProperty(node) ||
      is_StructPatternProperty(node)
    );
  }
}
|
||||
@@ -1,8 +1,119 @@
|
||||
import { plugin } from "./format/plugin";
|
||||
/**
|
||||
* Prettier Plugin for Rust formatting using rust WebAssembly
|
||||
*
|
||||
* This plugin provides support for formatting Rust files using the rust WASM implementation.
|
||||
*/
|
||||
import type { Plugin, Parser, Printer } from 'prettier';
|
||||
|
||||
export default plugin;
|
||||
export const languages = plugin.languages;
|
||||
export const parsers = plugin.parsers;
|
||||
export const printers = plugin.printers;
|
||||
export const options = plugin.options;
|
||||
export const defaultOptions = plugin.defaultOptions;
|
||||
// Import the rust WASM module
|
||||
import rustFmtInit, { format, type Config } from './rust_fmt_vite.js';
|
||||
|
||||
// Name shared by the parser entry and the printer's astFormat.
const parserName = 'rust';

// Language configuration
// Registers the Rust language with Prettier (file extensions, editor ids).
const languages = [
  {
    name: 'Rust',
    aliases: ['rust', 'rs'],
    parsers: [parserName],
    extensions: ['.rs', '.rs.in'],
    aceMode: 'rust',
    tmScope: 'source.rust',
    linguistLanguageId: 327,
    vscodeLanguageIds: ['rust']
  }
];
|
||||
|
||||
// Parser configuration
|
||||
const rustParser: Parser<string> = {
|
||||
astFormat: parserName,
|
||||
parse: (text: string) => text,
|
||||
locStart: () => 0,
|
||||
locEnd: (node: string) => node.length,
|
||||
};
|
||||
|
||||
// Lazy initialize rust WASM module
let initPromise: Promise<void> | null = null; // in-flight (or settled) init attempt, shared by concurrent callers
let isInitialized = false; // set once rustFmtInit() has resolved successfully

/**
 * Initializes the rust WASM formatter on first use.
 * Concurrent callers await the same promise; a failed attempt clears the
 * cached promise so a later call can retry, and rethrows to its caller.
 */
function initRustFmt(): Promise<void> {
  if (isInitialized) {
    return Promise.resolve();
  }

  if (!initPromise) {
    initPromise = (async () => {
      try {
        await rustFmtInit();
        isInitialized = true;
      } catch (error) {
        console.warn('Failed to initialize rust WASM module:', error);
        // Drop the rejected promise so the next call retries initialization.
        initPromise = null;
        throw error;
      }
    })();
  }

  return initPromise;
}
|
||||
|
||||
// Printer configuration
|
||||
const rustPrinter: Printer<string> = {
|
||||
// @ts-expect-error -- Support async printer like shell plugin
|
||||
async print(path, options) {
|
||||
try {
|
||||
// Wait for initialization to complete
|
||||
await initRustFmt();
|
||||
|
||||
const text = (path as any).getValue ? (path as any).getValue() : path.node;
|
||||
const config = getRustFmtConfig(options);
|
||||
|
||||
// Format using rust (synchronous call)
|
||||
const formatted = format(text, config);
|
||||
|
||||
return formatted.trim();
|
||||
} catch (error) {
|
||||
console.warn('Rust formatting failed:', error);
|
||||
// Return original text if formatting fails
|
||||
return (path as any).getValue ? (path as any).getValue() : path.node;
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
// Helper function to create rust config from Prettier options
|
||||
function getRustFmtConfig(options: any): Config {
|
||||
const config: Config = {};
|
||||
|
||||
// Map Prettier options to rust config
|
||||
if (options.useTabs !== undefined) {
|
||||
config.use_tabs = options.useTabs;
|
||||
}
|
||||
|
||||
// Note: rust currently only supports use_tabs option
|
||||
// Future versions may support more options like tab_width
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
// Plugin options
// Intentionally empty: rust only supports use_tabs (mapped from Prettier's
// built-in useTabs), and the remaining formatting is handled by prettyplease.
const options = {
  // Currently rust only supports use_tabs option
  // The tab width and other formatting options are handled by prettyplease internally
};
|
||||
|
||||
// Plugin definition
// Wires the language entry, the pass-through parser, and the WASM-backed
// printer together under the shared parser name.
const rustPlugin: Plugin = {
  languages,
  parsers: {
    [parserName]: rustParser,
  },
  printers: {
    [parserName]: rustPrinter,
  },
  options,
};
|
||||
|
||||
export default rustPlugin;
|
||||
export { languages };
|
||||
export const parsers = rustPlugin.parsers;
|
||||
export const printers = rustPlugin.printers;
|
||||
|
||||
@@ -1,116 +0,0 @@
|
||||
import {
|
||||
AttrSegment,
|
||||
CallExpression,
|
||||
DelimKind,
|
||||
ExpressionPath,
|
||||
Identifier,
|
||||
Literal,
|
||||
LocArray,
|
||||
MacroInvocation,
|
||||
NodeType,
|
||||
PunctuationToken,
|
||||
ReassignmentExpression,
|
||||
TK,
|
||||
rs,
|
||||
} from "jinx-rust";
|
||||
import { isTK, start } from "jinx-rust/utils";
|
||||
import { assert, exit } from "../../utils/common";
|
||||
import { isIdent } from "./utils";
|
||||
|
||||
type SimpleAttrItem =
|
||||
| Identifier //
|
||||
| Literal
|
||||
| ExpressionPath
|
||||
| CallExpression
|
||||
| ReassignmentExpression
|
||||
| MacroInvocation;
|
||||
|
||||
/**
 * Parses the raw token segments of an attribute into a simple expression tree
 * (identifier / literal / path / call / `key = value`), so the printer can
 * format `#[attr(...)]` contents like ordinary expressions.
 * Uses a shared cursor `i` over `seq`; throws (via `assert`) on unexpected tokens.
 */
export function transform_simpleAttrSyntax(segments: MacroInvocation["segments"]) {
  assert(segments.length !== 0, segments.loc.url());
  return transform_segments(segments, false);

  // nestedCall=true parses a comma-separated argument list; false parses a single item.
  function transform_segments<N extends boolean>(
    seq: LocArray<AttrSegment>,
    nestedCall: N
  ): N extends true ? LocArray<SimpleAttrItem, "()"> : SimpleAttrItem {
    let i = 0;

    if (nestedCall) {
      const args = rs.createLocArray<SimpleAttrItem, any>(DelimKind["()"], seq.loc.clone());
      while (i !== seq.length) {
        args.push(read(true));
        if (i === seq.length) break;
        // Arguments must be separated by commas.
        assert(isTK(seq[i++], TK[","]));
      }
      return args as any;
    } else {
      const res = read(true);
      // A single item must consume the entire sequence.
      assert(i === seq.length, res.loc.url());
      return res as any;
    }

    // Reads one item starting at cursor `i`; `allowEq` permits a `lhs = rhs` form.
    function read(allowEq: boolean): SimpleAttrItem {
      let lhs: Identifier | ExpressionPath;

      switch (seq[i].nodeType) {
        case NodeType.Literal:
          return seq[i++] as Literal;
        case NodeType.Identifier:
          lhs = seq[i++] as Identifier;
          break;
        case NodeType.PunctuationToken:
          // Only a leading `::` (absolute path) is valid punctuation here.
          assert((seq[i] as PunctuationToken).tk === TK["::"], seq[i].loc.url());
          lhs = eatPathSegment(undefined);
          break;
        default:
          exit.never();
      }

      // Extend `lhs` with `=`, `::`, or a call group until the item ends.
      while (true) {
        if (i === seq.length) return lhs;
        const seg = seq[i];
        switch (seg.nodeType) {
          case NodeType.PunctuationToken:
            switch (seg.tk) {
              case TK[","]:
                assert(nestedCall);
                return lhs;
              case TK["="]: {
                assert(allowEq);
                const right = (i++, read(false));
                return rs.mockNode(NodeType.ReassignmentExpression, right.loc.cloneFrom(start(lhs)), {
                  tk: TK["="],
                  kind: DelimKind["="],
                  left: lhs,
                  right,
                });
              }
              case TK["::"]:
                lhs = eatPathSegment(lhs);
                continue;
              default:
                exit.never();
            }
          case NodeType.DelimGroup:
            // A `(...)` group after a path makes this a call; arguments parse recursively.
            assert(seg.segments.dk === DelimKind["()"]);
            return rs.mockNode(NodeType.CallExpression, seq[i++].loc.cloneFrom(start(lhs)), {
              callee: lhs,
              typeArguments: undefined,
              method: undefined,
              arguments: transform_segments(seg.segments, true),
            });
          default:
            exit.never();
        }
      }
    }

    // Consumes `::ident` at the cursor, building an ExpressionPath onto `left`.
    function eatPathSegment(left: undefined | Identifier | ExpressionPath) {
      const segment = seq[i + 1];
      assert(isIdent(segment));
      const res = rs.mockNode(NodeType.ExpressionPath, segment.loc.cloneFrom(start(left ?? seq[i])), { namespace: left, segment });
      i += 2;
      return res;
    }
  }
}
|
||||
@@ -1,92 +0,0 @@
|
||||
import {
|
||||
DelimGroup,
|
||||
DelimKind,
|
||||
IfBlockExpression,
|
||||
LocArray,
|
||||
MacroInvocation,
|
||||
NodeType,
|
||||
NodeWithBody,
|
||||
rs,
|
||||
Segment,
|
||||
Snippet,
|
||||
StatementNode,
|
||||
TK,
|
||||
} from "jinx-rust";
|
||||
import { insertNodes, start, transferAttributes } from "jinx-rust/utils";
|
||||
import { assert, iLast } from "../../utils/common";
|
||||
import { isGroup, isIdent, isToken } from "./utils";
|
||||
|
||||
/**
 * Rewrites the token segments of a `cfg_if! { if #[cfg(...)] {...} else ... }`
 * invocation into a real if/else expression chain wrapped in a Snippet,
 * so it can be formatted like ordinary code.
 * Expects the segment layout: `if`, `#`, `[...]`, `{...}` (, `else`, ...).
 */
export function transform_macro_cfg_if(segments: MacroInvocation["segments"]) {
  // Attributes/comments harvested from nested blocks, merged into the Snippet.
  const danglingAttributes: Snippet["danglingAttributes"] = [];
  const comments: Snippet["comments"] = [];

  // Recursively builds the if/else-if chain, 5 segments per branch.
  const block = (function create_if_block(i: number): IfBlockExpression | undefined {
    if (i >= segments.length) return undefined;

    const _if = segments[i];
    const pound = segments[i + 1];
    const grp = segments[i + 2];
    const block = segments[i + 3];
    const _else = segments[i + 4];

    assert(
      isIdent(_if, "if") &&
        isToken(pound, TK["#"]) &&
        isGroup(grp, DelimKind["[]"]) &&
        isGroup(block, DelimKind["{}"]) &&
        (!_else || isIdent(_else, "else"))
    );

    return create_block(block, (body) =>
      rs.mockNode(NodeType.IfBlockExpression, block.loc.cloneFrom(start(_if)), {
        label: undefined,
        // The `#[cfg(...)]` attribute is wedged in as the if-condition.
        condition: rs.mockNode(NodeType.Attribute, grp.loc.cloneFrom(start(pound)), {
          segments: grp.segments,
          value: grp.segments.loc.sliceText(),
          line: false,
          inner: false,
        }) as any,
        body: body,
        // Trailing bare `{...}` becomes the final else; otherwise recurse into else-if.
        else: (_else && iLast(i + 5, segments)
          ? function create_else_block(i: number) {
              const block = segments[i];
              assert(isGroup(block, DelimKind["{}"]));
              return create_block(block, (body) =>
                rs.mockNode(NodeType.BlockExpression, body.loc.clone(), {
                  label: undefined,
                  body,
                })
              );
            }
          : create_if_block)(i + 5),
      })
    );
  })(0);

  const ast = rs.createLocArray(
    segments.dk,
    segments.loc,
    block && [
      rs.mockNode(NodeType.ExpressionStatement, block.loc.clone(), {
        expression: block,
        semi: false,
      }),
    ]
  );

  return rs.mockNode(NodeType.Snippet, segments.loc.clone(), { ast, danglingAttributes, comments });

  // Parses a `{...}` token group into statements and hands them to `fn`,
  // collecting the group's dangling attributes and comments along the way.
  function create_block<R extends NodeWithBody>(
    group: DelimGroup<Segment> & { segments: { dk: 3 } },
    fn: (statements: LocArray<StatementNode, "{}">) => R
  ): R {
    const snippet = rs.toBlockBody(group.segments);

    insertNodes(danglingAttributes, snippet.danglingAttributes);
    insertNodes(comments, snippet.comments);

    const block = fn(snippet.ast);
    transferAttributes(snippet, block);
    return block;
  }
}
|
||||
@@ -1,16 +0,0 @@
|
||||
import { DelimGroup, DelimKind, Identifier, LocArray, PunctuationToken, Segment, TK } from "jinx-rust";
|
||||
import { isTK, is_DelimGroup, is_Identifier, is_PunctuationToken } from "jinx-rust/utils";
|
||||
|
||||
export function isIdent(node: Segment | undefined, name?: string): node is Identifier {
|
||||
return !!node && is_Identifier(node) && (null == name || node.name === name);
|
||||
}
|
||||
export function isToken(node: Segment | undefined, tk?: TK): node is PunctuationToken {
|
||||
return !!node && (null == tk ? is_PunctuationToken(node) : isTK(node, tk));
|
||||
}
|
||||
export function isGroup<D extends DelimKind>(node: Segment | undefined, dk?: D): node is DelimGroup & { segments: LocArray<any, D> } {
|
||||
return !!node && is_DelimGroup(node) && (null == dk || node.segments.dk === dk);
|
||||
}
|
||||
|
||||
export function isCallLike(tk_1: Segment | undefined, tk_2: Segment | undefined): boolean {
|
||||
return !!tk_1 && !!tk_2 && is_Identifier(tk_1) && is_DelimGroup(tk_2) && tk_2.segments.dk === DelimKind["()"];
|
||||
}
|
||||
@@ -1,568 +0,0 @@
|
||||
import {
|
||||
Attribute,
|
||||
AttributeOrDocComment,
|
||||
CallExpression,
|
||||
DelimKind,
|
||||
ExpressionNode,
|
||||
LocArray,
|
||||
MacroInvocation,
|
||||
MemberExpression,
|
||||
Node,
|
||||
NodeType,
|
||||
NodeWithBodyNoBody,
|
||||
NodeWithTypeBounds,
|
||||
NTMap,
|
||||
ProgramLike,
|
||||
rs,
|
||||
Snippet,
|
||||
StatementNode,
|
||||
StructLiteral,
|
||||
StructPattern,
|
||||
TK,
|
||||
TypeBound,
|
||||
TypeBoundsStandaloneNode,
|
||||
TypeDynBounds,
|
||||
TypeTraitBound,
|
||||
} from "jinx-rust";
|
||||
import {
|
||||
countActualNodeChildren,
|
||||
deleteAttributes,
|
||||
each_childNode,
|
||||
end,
|
||||
getActualNodeChildren,
|
||||
getBodyOrCases,
|
||||
getMacroName,
|
||||
getNodeChildren,
|
||||
hasAttributes,
|
||||
hasMethod,
|
||||
hasTypeBounds,
|
||||
includesTK,
|
||||
insertNode,
|
||||
insertNodes,
|
||||
is_AttributeOrDocComment,
|
||||
is_BareTypeTraitBound,
|
||||
is_BlockExpression,
|
||||
is_CallExpression,
|
||||
is_ClosureFunctionExpression,
|
||||
is_DocCommentAttribute,
|
||||
is_ExpressionStatement,
|
||||
is_ExpressionWithBodyOrCases,
|
||||
is_FlowControlExpression,
|
||||
is_IfBlockExpression,
|
||||
is_MacroInvocation,
|
||||
is_Node,
|
||||
is_NodeWithBodyNoBody,
|
||||
is_NodeWithBodyOrCases,
|
||||
is_Program,
|
||||
is_PunctuationToken,
|
||||
is_ReassignmentNode,
|
||||
is_Snippet,
|
||||
is_TypeBoundsStandaloneNode,
|
||||
is_TypeDynBounds,
|
||||
is_TypeImplBounds,
|
||||
is_TypeTraitBound,
|
||||
ownStart,
|
||||
reassignNodeProperty,
|
||||
start,
|
||||
transferAttributes,
|
||||
unsafe_set_nodeType,
|
||||
} from "jinx-rust/utils";
|
||||
import { isPrettierIgnoreAttribute, setPrettierIgnoreTarget } from "../format/comments";
|
||||
import { is_StructSpread } from "../format/core";
|
||||
import { CustomOptions } from "../format/external";
|
||||
import { getOptions } from "../format/plugin";
|
||||
import {
|
||||
Array_replace,
|
||||
Array_splice,
|
||||
assert,
|
||||
binarySearchIn,
|
||||
each,
|
||||
exit,
|
||||
iLast,
|
||||
last_of,
|
||||
Map_get,
|
||||
spliceAll,
|
||||
try_eval,
|
||||
} from "../utils/common";
|
||||
import { transform_simpleAttrSyntax } from "./custom/attribute";
|
||||
import { transform_macro_cfg_if } from "./custom/cfg_if";
|
||||
|
||||
// An Attribute whose segments were successfully parsed into expression form.
export interface ExpressionLikeAttribute extends Attribute {
  segments: LocArray<any, "[]">;
}

// A MacroInvocation rewritten to look like a call: `name!(arg, arg, ...)`.
// Presence of `arguments` is the discriminator (see is_CallLikeMacroInvocation).
export interface CallLikeMacroInvocation extends MacroInvocation {
  segments: LocArray<any, any>;
  callee: MacroInvocation["callee"];
  method: undefined;
  typeArguments: undefined;
  arguments: LocArray<ExpressionNode, "()" | "[]" | "{}">;
}

// A MacroInvocation rewritten to look like a block: `name! { stmt; ... }`.
// Presence of `body` is the discriminator (see is_BlockLikeMacroInvocation).
export interface BlockLikeMacroInvocation extends MacroInvocation {
  segments: LocArray<any, any>;
  body: LocArray<StatementNode, "()" | "[]" | "{}">;
  attributes?: AttributeOrDocComment[];
}
|
||||
|
||||
export function is_CallLikeMacroInvocation(node: Node): node is CallLikeMacroInvocation {
|
||||
return is_MacroInvocation(node) && "arguments" in node;
|
||||
}
|
||||
|
||||
export function is_BlockLikeMacroInvocation(node: Node): node is BlockLikeMacroInvocation {
|
||||
return is_MacroInvocation(node) && "body" in node;
|
||||
}
|
||||
|
||||
export function is_CallExpression_or_CallLikeMacroInvocation(node: any): node is CallExpression | CallLikeMacroInvocation {
|
||||
return is_CallExpression(node) || is_CallLikeMacroInvocation(node);
|
||||
}
|
||||
|
||||
// Macros whose token trees must never be reinterpreted as call/block syntax.
const IGNORED_MACROS = new Set([
  // std
  // crates
  "quote",
]);

// Well-known macros pinned to a specific delimiter kind regardless of what
// delimiter the user actually wrote.
const HARDCODED_MACRO_DELIMS = new Map<string, MacroInvocation["segments"]["dk"]>();
each(
  {
    [DelimKind["{}"]]: [
      // std
      "thread_local",
      // crates
      "cfg_if",
    ],
    [DelimKind["()"]]: [
      // std
      "assert_eq",
      "assert_ne",
      "assert",
      "cfg",
      "concat_bytes",
      "concat_idents",
      "concat",
      "debug_assert_eq",
      "debug_assert_ne",
      "debug_assert",
      "eprint",
      "eprintln",
      "format_args_nl",
      "format_args",
      "format",
      "matches",
      "panic",
      "print",
      "println",
      "try",
      "unimplemented",
      "unreachable",
      "write",
      "writeln",
      // crates
    ],
    [DelimKind["[]"]]: [
      // std
      "vec",
      // crates
    ],
  },
  (names, tk) =>
    each(names, (name) => {
      // Object keys come back as strings; `+tk` restores the numeric DelimKind.
      HARDCODED_MACRO_DELIMS.set(name, +tk as MacroInvocation["segments"]["dk"]);
    })
);

// Module-level transform state, populated for the duration of transform_ast().
let _COMMENTS: CustomOptions["comments"] = undefined!;
let _DANGLING_ATTRIBUTES: CustomOptions["danglingAttributes"] = undefined!;
|
||||
|
||||
/**
 * Entry point of the AST transform pass: installs the module-level state,
 * walks the parsed file, and always resets the state afterwards (even if a
 * transform throws) so a failed run cannot leak into the next one.
 */
export function transform_ast(options: CustomOptions) {
  try {
    _COMMENTS = options.comments;
    _DANGLING_ATTRIBUTES = options.danglingAttributes;
    transformNode(options.rsParsedFile);
  } finally {
    _depth = 0;
    _COMMENTS = undefined!;
    _DANGLING_ATTRIBUTES = undefined!;
  }
}
|
||||
|
||||
// Nesting depth of snippet re-parses; non-zero while a macro body is being transformed.
let _depth = 0;
const isReadingSnippet = () => 0 !== _depth;
|
||||
|
||||
/**
 * Attempts to reinterpret `node`'s token tree as a Snippet via `read_snippet`.
 * On success: recursively transforms the snippet (at increased depth), lets
 * `fn` graft the result onto `node`, marks it transformed, and returns it.
 * Returns undefined when `read_snippet` throws (i.e. the tokens don't parse).
 */
function maybe_transform_node<T extends Node, S extends Snippet>(
  node: T,
  read_snippet: () => S,
  fn: (node: T, snippet: S) => void
): T | undefined {
  const snippet = try_eval(read_snippet);
  if (snippet) {
    ++_depth;
    transformNode(snippet);
    --_depth;
    fn(node, snippet);
    transformed.add(node);
    return node;
  }
}
|
||||
// Nodes whose token trees were successfully rewritten into structured syntax.
const transformed = new WeakSet<Node>();
/** Whether `node` was rewritten by this transform pass. */
export function isTransformed(node: Node) {
  return transformed.has(node);
}
|
||||
|
||||
// Per-node-type transforms applied (post-order) by transformNode.
// These mutate nodes in place to normalize them for printing.
const transform: { [K in NodeType]?: (node: NTMap[K]) => void } = {
  // Best-effort: parse attribute tokens into expression form; ignore failures.
  [NodeType.Attribute](node) {
    try_eval(() => {
      node.segments = rs.createLocArray(node.segments.dk, node.segments.loc.clone(), [
        transform_simpleAttrSyntax(node.segments),
      ]) as any;
      transformed.add(node);
    });
  },
  // Normalize macro_rules arms to `(match) => {transform}` delimiters.
  [NodeType.MacroInlineRuleDeclaration](node) {
    node.match.dk = DelimKind["()"];
    node.transform.dk = DelimKind["{}"];
  },
  // Reinterpret a macro invocation's token tree as a call or a block where possible.
  [NodeType.MacroInvocation](node) {
    const name = getMacroName(node);

    // Skip ignored macros and trivially empty/punctuation-only token trees.
    if (
      IGNORED_MACROS.has(name) ||
      node.segments.length === 0 ||
      (node.segments.length === 1 && is_PunctuationToken(node.segments[0]))
    ) {
      return;
    }

    const tk = transformMacroDelim(name, node);

    if (name === "matches") {
      //
    }

    if (name === "if_chain") {
      //
    }

    if (name === "cfg_if") {
      transformBlockLike(() => transform_macro_cfg_if(node.segments) as any);
    } else if (tk === DelimKind["{}"]) {
      transformBlockLike(); /* || (includesTK(node, TK[","]) && transformCallLike()); */
    } else {
      transformCallLike(); /* || (includesTK(node, TK[";"]) && transformBlockLike()); */
    }

    // Graft a parsed statement body onto the invocation (block form).
    function transformBlockLike(transform = () => rs.toBlockBody(node.segments)) {
      return maybe_transform_node(node as BlockLikeMacroInvocation, transform, (node, snippet) => {
        const _body = snippet.ast;
        _body.dk = tk;

        node.body = _body;
        node.segments = _body;
        transferAttributes(snippet, node);
      });
    }

    // Graft a parsed argument list onto the invocation (call form).
    function transformCallLike() {
      return maybe_transform_node(
        node as CallLikeMacroInvocation,
        () => rs.toCallExpressionArguments(node.segments),
        (node, snippet) => {
          const _arguments = snippet.ast;
          _arguments.dk = tk;

          node.method = undefined;
          node.typeArguments = undefined;
          node.arguments = _arguments;
          node.segments = _arguments;
        }
      );
    }
  },
  // Fold `recv.method(args)` into callee = MemberExpression(recv, method),
  // remembering the node so the printer can restore method-call layout.
  [NodeType.CallExpression](node) {
    if (hasMethod(node)) {
      node.callee = rs.mockNode(NodeType.MemberExpression, node.method.loc.cloneFrom(start(node.callee)), {
        expression: node.callee,
        property: node.method,
        computed: false,
      });
      node.method = undefined!;
      getOptions().actuallyMethodNodes.add(node.callee as MemberExpression);
    }
  },

  // Body-less declarations get an empty mock body so printing is uniform.
  [NodeType.AutoTraitDeclaration](node) {
    mockBodyNoBody(node);
  },
  [NodeType.NegativeImplDeclaration](node) {
    mockBodyNoBody(node);
  },

  // Struct spreads (`..rest`) are moved to the end of the property list.
  [NodeType.StructLiteral](node) {
    moveSpreadsToEnd(node);
  },
  [NodeType.StructPattern](node) {
    moveSpreadsToEnd(node);
  },
};
|
||||
|
||||
/**
 * Moves all struct-spread properties (`..rest`) to the end of the property
 * list, preserving the relative order of everything else.
 * No-op when the only spread is already last.
 */
function moveSpreadsToEnd(node: StructLiteral | StructPattern) {
  const props = node.properties;
  // Only rewrite when some spread is NOT already in final position.
  if (props.some((p, i, a) => is_StructSpread(p) && !iLast(i, a))) {
    const spreads: any[] = [];
    for (let i = 0; i < props.length; i++) {
      const prop = props[i];
      if (is_StructSpread(prop)) {
        // Remove in place; `i--` compensates for the shifted indices.
        Array_splice(props, prop, i--);
        spreads.push(prop);
      }
    }
    props.push(...spreads);
  }
}
|
||||
|
||||
function mockBodyNoBody(node: NodeWithBodyNoBody) {
|
||||
// @ts-expect-error
|
||||
node.body = rs.createLocArray(last_of(rs.toTokens(node).ast).loc.clone(), DelimKind["{}"]);
|
||||
}
|
||||
|
||||
function transformMacroDelim(name: string, node: MacroInvocation): 1 | 2 | 3 {
|
||||
if (HARDCODED_MACRO_DELIMS.has(name)) {
|
||||
return HARDCODED_MACRO_DELIMS.get(name)!;
|
||||
}
|
||||
if (node.segments.dk === DelimKind["{}"] && includesTK(node, TK[","])) {
|
||||
return DelimKind["()"];
|
||||
}
|
||||
if (node.segments.dk === DelimKind["()"] && includesTK(node, TK[";"])) {
|
||||
return DelimKind["{}"];
|
||||
}
|
||||
return node.segments.dk;
|
||||
}
|
||||
|
||||
// export function createTransformed<S extends Node>(create: () => S): S {
|
||||
// return transformNode(create());
|
||||
// }
|
||||
|
||||
// Guards against visiting (and re-transforming) the same node twice.
const seen = new WeakSet<Node>();
/**
 * Recursive post-order walk: children are transformed first, then the node
 * itself gets block insertion, its per-type transform, type-bound flattening,
 * and attribute normalization. Returns `node` for chaining.
 */
function transformNode<T extends Node>(node: T, parent?: Node, key?: string, index?: any): T {
  if (!seen.has(node)) {
    seen.add(node);
    if (is_Snippet(node) || is_Program(node)) {
      registerPogramLike(node);
    }

    each_childNode(node, transformNode);

    insert_blocks(node, parent, key, index);

    transform[node.nodeType]?.(node as any);

    flatten_typeBounds(node);

    transform_nodeAttributes(node);
  }
  return node;
}
|
||||
|
||||
/**
 * Wraps certain bare expressions in `{ ... }` blocks so the printer emits
 * valid Rust: flow-control/reassignment used as sub-expressions, and closure
 * bodies that require braces (typed return or block-like body).
 */
function insert_blocks(node: Node, parent?: Node, key?: string, index?: any) {
  if (parent && key) {
    if (
      !is_ExpressionStatement(parent) &&
      (false ||
        // "1 + break" -> "1 + { break; }"
        is_FlowControlExpression(node) ||
        // "1 + a = b" -> "1 + { a = b; }"
        (!isReadingSnippet() && is_ReassignmentNode(node) && !(is_ReassignmentNode(parent) && parent.left === node)))
    ) {
      // Replace node in its parent with a block wrapping it.
      reassignNodeProperty(blockify(node), parent, key, index);
    } else if (
      is_ClosureFunctionExpression(node) &&
      (false ||
        // "|| -> T x" -> "|| -> T { x }"
        (!!node.returnType && !is_BlockExpression(node.expression)) ||
        // "|| match x {}" -> "|| { match x {} }"
        (is_ExpressionWithBodyOrCases(node.expression) &&
          !is_BlockExpression(node.expression) &&
          !is_IfBlockExpression(node.expression)))
    ) {
      node.expression = blockify(node.expression);
    }
  }
  // Builds `{ node }`: a BlockExpression holding node as a semicolon-free statement.
  function blockify(node: ExpressionNode) {
    const block = rs.mockNode(NodeType.BlockExpression, node.loc.clone(), {
      label: undefined,
      body: rs.createLocArray(DelimKind["{}"], node.loc.clone(), [
        rs.mockNode(NodeType.ExpressionStatement, node.loc.clone(), { semi: false, expression: node }),
      ]),
    });
    transferAttributes(node, block);
    return block;
  }
}
|
||||
|
||||
/**
 * Flattens parenthesized nested type bounds into their parent bound list,
 * e.g. `A + (B + C)` -> `A + B + C`, and hoists a leading `impl`/`dyn`
 * prefix out of a parenthesized first bound. Mutates `topNode` in place.
 */
function flatten_typeBounds(topNode: Node) {
  if (hasTypeBounds(topNode)) {
    const nestedBounds: TypeTraitBound[] = topNode.typeBounds.filter(isBoundWithNestedBounds);
    const [first, ...subsequent] = nestedBounds;

    // Splices a nested bound's own bounds into topNode's list, in place of the bound.
    const flatten = (bound: TypeTraitBound) =>
      Array_replace(topNode.typeBounds, bound, ...(bound.typeExpression as unknown as TypeDynBounds).typeBounds);

    if (nestedBounds.every(isBareBoundWithNestedBoundsNoPrefix)) {
      // A + (B + C)
      // -> A + B + C
      each(nestedBounds, flatten);
    } else if (
      !hasDefinedPrefix(topNode) &&
      first === topNode.typeBounds[0] &&
      !isBareBoundWithNestedBoundsNoPrefix(first) &&
      subsequent.every(isBareBoundWithNestedBoundsNoPrefix)
    ) {
      if (is_TypeDynBounds(topNode)) {
        if (is_TypeImplBounds(first.typeExpression)) {
          // (impl A) + B
          // -> impl A + B
          unsafe_set_nodeType(topNode, NodeType.TypeImplBounds);
        } else {
          // (dyn A) + B
          // -> dyn A + B
          topNode.dyn = true;
        }
        each(nestedBounds, flatten);
      } else {
        // Fold the trailing bounds into the first (prefixed) nested bound instead.
        each(subsequent, flatten);
        (first.typeExpression as unknown as TypeDynBounds).typeBounds.push(...topNode.typeBounds.slice(1));
        topNode.typeBounds.length = 1;
      }
    }
  }

  // A trait bound whose type expression is itself a standalone bounds node (i.e. parenthesized bounds).
  function isBoundWithNestedBounds(bound: TypeBound): bound is TypeTraitBound & { typeExpression: TypeBoundsStandaloneNode } {
    return is_TypeTraitBound(bound) && is_TypeBoundsStandaloneNode(bound.typeExpression);
  }
  // ...and additionally has no `?`/`~const`-style modifier of its own.
  function isBareBoundWithNestedBounds(bound: TypeBound): bound is TypeTraitBound & { typeExpression: TypeBoundsStandaloneNode } {
    return isBoundWithNestedBounds(bound) && is_BareTypeTraitBound(bound);
  }
  // ...and whose nested bounds carry no `dyn`/`impl` prefix.
  function isBareBoundWithNestedBoundsNoPrefix(bound: TypeBound): bound is TypeTraitBound & { typeExpression: TypeDynBounds } {
    return isBareBoundWithNestedBounds(bound) && !hasDefinedPrefix(bound.typeExpression);
  }
  // Whether the bounds node is written with an explicit `dyn` or is an `impl` bounds node.
  function hasDefinedPrefix(node: NodeWithTypeBounds) {
    return (is_TypeDynBounds(node) && node.dyn) || is_TypeImplBounds(node);
  }
}
|
||||
|
||||
/**
 * Normalizes a node's attribute list after parsing: removes doc-comment
 * attributes that were double-registered as comments (snippet mode), and
 * re-homes inner attributes (`#![…]`) into the node's body/statement list.
 * Mutates `node.attributes` in place and deletes it when emptied.
 */
function transform_nodeAttributes(node: Node) {
  /**
   * # Inside Token trees:
   *
   * 1. DocCommentAttribute --is parsed as--> Comment
   * 2. Attribute --is parsed as--> Token<'#'>, DelimGroup<'[]'>
   *
   * # Transforming tokens into a Snippet:
   *
   * 1. DocCommentAttribute <--replace from-- Comment
   *    a) Remove node with same loc from comments
   *    b) Merge Snippet.danglingAttributes with Program.danglingAttributes
   *
   * 2. Attribute (no action needed)
   *
   */
  if (hasAttributes(node)) {
    const attrs = node.attributes;
    for (let i = 0; i < attrs.length; i++) {
      const attr = attrs[i];
      if (isReadingSnippet() && is_DocCommentAttribute(attr)) {
        // The same source span was also collected as a Comment — drop the duplicate.
        const index = binarySearchIn(_COMMENTS, start(attr), start);
        __DEV__: assert(index !== -1), assert(end(_COMMENTS[index]) === end(attr));
        _COMMENTS.splice(index, 1);
      }
      if (attr.inner) {
        if (isPrettierIgnoreAttribute(attr)) {
          setPrettierIgnoreTarget(is_Program(node) ? node.loc.src : node, attr);
        }
        // @ts-expect-error Inserting Attribute into StatementNode[]
        insertNode(is_Snippet(node) ? node.ast : getBodyOrCases(node)!, attr);
        // i-- compensates for the element removed at the current position.
        Array_splice(attrs, attr, i--);
      }
    }
    if (attrs.length === 0) {
      deleteAttributes(node);
    }
  }
}
|
||||
|
||||
/**
 * Drains a program-like node's comments and dangling attributes into the
 * module-level `_COMMENTS` / `_DANGLING_ATTRIBUTES` registries, transforming
 * each non-doc attribute first. In snippet mode, doc-comment attributes that
 * were double-registered as comments are removed from `_COMMENTS`.
 * NOTE(review): name typo — "Pogram" should be "Program"; renaming would
 * break callers, so it is left as-is.
 */
function registerPogramLike(program: Extract<Node, ProgramLike>) {
  const comments = spliceAll(program.comments);
  const danglingAttributes = spliceAll(program.danglingAttributes);
  for (let i = 0; i < danglingAttributes.length; i++) {
    const attr = danglingAttributes[i];
    // if (isReadingSnippet() && is_DocCommentAttribute(attr)) {
    // }
    if (is_DocCommentAttribute(attr)) {
      if (isReadingSnippet()) {
        // Same span was also collected as a Comment — drop the duplicate.
        const index = binarySearchIn(_COMMENTS, start(attr), start);
        __DEV__: assert(index !== -1), assert(end(_COMMENTS[index]) === end(attr));
        _COMMENTS.splice(index, 1);
      }
    } else {
      transformNode(danglingAttributes[i], program, "danglingAttributes", i);
    }
  }
  // Comments are only registered globally outside snippet mode.
  if (!isReadingSnippet()) insertNodes(_COMMENTS, comments);
  insertNodes(_DANGLING_ATTRIBUTES, danglingAttributes);
}
|
||||
|
||||
// Per-node cache of the child list used for prettier comment attachment.
const CommentChildNodes = new WeakMap<Node, Node[]>();

/**
 * Returns (and caches) the child-node list prettier uses to attach comments
 * to `n`. For body/case-bearing nodes, misplaced inner attributes that fall
 * inside a child's span are moved into that child's list.
 */
export function getCommentChildNodes(n: any): Node[] {
  const children = Map_get(CommentChildNodes, n, getTransformedNodeChildren);
  /**
   * parent {
   *   #[attr]
   *   #![attr] <-------- list misplaced inner attrs as part of "#[attr] child {}"
   *   child {}
   * }
   */
  if (is_NodeWithBodyOrCases(n) || is_BlockLikeMacroInvocation(n)) {
    for (let i = 0; i < children.length; i++) {
      const attr = children[i];
      if (is_AttributeOrDocComment(attr)) {
        // A sibling whose span encloses the attribute adopts it.
        const target = children.find((n) => start(n) <= start(attr) && ownStart(n) >= end(attr));
        if (target) {
          children.splice(i--, 1);
          insertNode(Map_get(CommentChildNodes, target, getTransformedNodeChildren), attr);
        }
      }
    }
  }
  return children;

  // Cache-miss initializer: builds and dev-verifies the child list for `node`.
  function getTransformedNodeChildren(node: Node) {
    if (is_Program(node)) node.comments ??= []; // prettier core deletes this property
    const children = getNodeChildren(node);

    if (is_NodeWithBodyNoBody(node)) {
      insertNodes(children, (node as any).body);
    }

    __DEV__: {
      // Sanity check: the declared child list must match the node's actual children.
      const actual_count = countActualNodeChildren(node);
      if (
        children.length !== actual_count &&
        !(is_MacroInvocation(node) && actual_count - node.segments.length === children.length)
      ) {
        const actual = getActualNodeChildren(node);
        const missing = actual.filter((n) => !children.includes(n));
        const unknown = children.filter((n) => !actual.includes(n));
        const duplicates_in_object = actual.filter((n, i, a) => i !== 0 && n === a[i - 1]);
        const duplicates_in_childNodes = children.filter((n, i, a) => i !== 0 && n === a[i - 1]);
        const ctx = { missing, unknown, duplicates_in_object, duplicates_in_childNodes };
        for (let key in ctx) if (ctx[key].length === 0) delete ctx[key];
        exit(`${node.type} was transformed but did not patch its childNodes list`, ctx, node.loc.url(), node);
      }
      for (const child of children)
        if (!is_Node(child)) exit(`${node.type}'s childNodes includes unexpected entries`, { node, child });
    }
    return children;
  }
}
|
||||
@@ -1,246 +0,0 @@
|
||||
import { createCustomError } from "./debug";
|
||||
|
||||
// Ambient augmentation so `import.meta.url` type-checks in every build target.
declare global {
  interface ImportMeta {
    url: string;
  }
}

/** Compile-time-only cast: asserts `value` is `T` with no runtime check. */
export function Narrow<T extends R, R = unknown>(value: R): asserts value is T {}
/** Compile-time-only assertion that `B` is assignable to `A`; no runtime effect. */
export function AssertTypesEq<A extends B, B>(...args: [B] extends [A] ? [] : [RIGHT_TYPES_NOT_ASSIGNABLE_TO_LEFT: Exclude<B, A>]) {}
|
||||
|
||||
// Key type of an iterable container: numeric indices for arrays, map key type,
// own keys for plain objects; `never` for Sets and bare iterables.
// prettier-ignore
type indexof<A> = A extends readonly any[] ? A extends 0 ? any : keyof A & number : A extends Set<unknown> ? never : A extends Map<infer U, unknown> ? U
  : A extends Iterable<unknown> ? never : A extends object ? keyof A & (number | string) : never;
// Element/value type of an iterable container (mirror of indexof).
// prettier-ignore
type valueof<A> = A extends ReadonlyArray<infer U> ? A extends 0 ? any : U : A extends Set<infer U> ? U : A extends Map<unknown, infer U> ? U
  : A extends Iterable<infer U> ? U : A extends object ? A[keyof A & (number | string)] : never;
// Any container `each` can iterate: array, plain object, Set, or Map.
// prettier-ignore
type vObject<V extends unknown = unknown, K extends unknown = unknown> = | object | readonly V[] | { [key: string]: V } | anySet<V> | anyMap<K, V>;
// Iteration callback; key is typed per the container's indexof.
export type itfn<A, R> = (value: valueof<A>, key: indexof<A>) => R;
type anySet<V extends unknown = unknown> = Set<V>;
type anyMap<K extends unknown = unknown, V extends unknown = unknown> = Map<K, V>;
type anyfunction<A extends any[] = unknown[], R = unknown> = (...args: A) => R;
type objlike = object | anyfunction;
// Map-like accepted by Map_get: WeakMap is allowed only for object/function keys.
type anymap<K extends unknown = unknown, V extends unknown = unknown> = K extends objlike ? Map<K, V> | WeakMap<K, V> : Map<K, V>;
|
||||
|
||||
export function exit(message: string, ...ctx: any[]): never {
|
||||
if (ctx.length > 0) console.log("Error context:", { ...ctx });
|
||||
throw createCustomError({ message });
|
||||
}
|
||||
exit.never = function never(...ctx: any[]): never {
|
||||
exit("Reached unreachable code", ...ctx);
|
||||
};
|
||||
export function assert(predicate: boolean, err?: string, ...ctx: any[]): asserts predicate {
|
||||
__DEV__: if (typeof predicate !== "boolean") exit("Expected boolean", predicate);
|
||||
if (false === predicate) exit(err ?? "Assertion failed", ...ctx);
|
||||
}
|
||||
/** The identity function — returns its argument unchanged (used e.g. as the browser no-op styler in `color`). */
export function Identity<T>(v: T): T {
  return v;
}
|
||||
|
||||
export function last_of<T extends ArrayLike<any>>(arr: T): T extends readonly [...infer A, infer U] ? U : T[number] {
|
||||
__DEV__: isArrayLike(arr) || exit("Expected Array"), arr.length > 0 || exit("Attempted to retrieve last item of an empty array", arr);
|
||||
return arr[arr.length - 1];
|
||||
}
|
||||
export function maybe_last_of<T extends readonly any[] | undefined>(
|
||||
arr: T
|
||||
): T extends any[] ? (T extends readonly [...infer A, infer U] ? U : T[number]) : undefined {
|
||||
return (undefined === arr || 0 === arr.length ? undefined : last_of(arr as any[])) as any;
|
||||
}
|
||||
|
||||
export function normPath(filepath: string) {
|
||||
return filepath.replace(/^file:\/\/\//, "").replace(/\\\\?/g, "/");
|
||||
}
|
||||
|
||||
export function print_string(str: string) {
|
||||
return /[\u0000-\u0020]/.test(str)
|
||||
? str
|
||||
.replace(/ /g, "•")
|
||||
.replace(/\n/g, "↲")
|
||||
.replace(/\t/g, "╚")
|
||||
.replace(/[\u0000-\u0020]/g, "□")
|
||||
: str;
|
||||
}
|
||||
|
||||
/** True when `value` is a non-null object that structurally looks array-like (see `oisArrayLike`). */
function isArrayLike(value: any): value is ArrayLike<unknown> {
  return is_object(value) && oisArrayLike(value);
}
|
||||
|
||||
function oisArrayLike(value: {}): value is ArrayLike<unknown> {
|
||||
return "length" in value && (0 === (value as any).length || "0" in value);
|
||||
}
|
||||
|
||||
/**
 * Binary search over an array sorted ascending by `toValue(item)`.
 * Returns the index of an exact match; otherwise the index of the LAST item
 * whose value is below `target` (-1 when `target` precedes all items, or the
 * array is empty). NOTE(review): assumes strictly sorted input — unverified here.
 */
export function binarySearchIn<T extends {}>(array: ArrayLike<T>, target: number, toValue: (item: T) => number) {
  if (isEmpty(array)) return -1;
  let i = 0;
  let low = 0;
  let high = array.length - 1;
  let value = toValue(array[high]);
  // Fast path: target at or beyond the last item.
  if (target >= value) return high;
  else high--;
  while (low <= high) {
    i = low + ((high - low) >> 1);
    value = toValue(array[i]);
    if (target === value) return i;
    if (target > value) low = i + 1;
    else high = i - 1;
  }
  // `low` is the first index whose value exceeds target.
  return low - 1;
}
|
||||
|
||||
export function getTerminalWidth(fallbackWidth = 200) {
|
||||
return globalThis?.process?.stdout?.columns ?? fallbackWidth;
|
||||
}
|
||||
|
||||
// @ts-ignore
const isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined";
/**
 * ANSI styling helpers. In browsers every styler degrades to `Identity`.
 * `cfn(code)` builds a foreground-color styler that re-applies its color after
 * any embedded reset (\x1B[39m) so nesting works; `mfn(on, off)` builds a
 * modifier styler (bold/italic/…) with explicit on/off codes.
 */
export const color = ((cfn, mfn) => ({
  black: cfn(30),
  red: cfn(31),
  green: cfn(32),
  yellow: cfn(33),
  blue: cfn(34),
  magenta: cfn(35),
  cyan: cfn(36),
  white: cfn(37),
  grey: cfn(90),
  bold: mfn(1, 22),
  italic: mfn(3, 23),
  underline: mfn(4, 24),
  hidden: mfn(8, 28),
  // Hides the cursor for the duration of `str`.
  hiddenCursor: (str: string) => `\x1B[?25l${str}\x1B[?25h`,
  // Strips ANSI SGR sequences; unstyledLength measures the visible width.
  unstyle: (str: string) => str.replace(/\x1B\[[0-9][0-9]?m/g, ""),
  unstyledLength: (str: string) => str.replace(/\x1B\[[0-9][0-9]?m/g, "").length,
  link: (str: string) => color.underline(color.blue(str)),
}))(
  (c1: number) => (isBrowser ? Identity : (str: string) => `\x1B[${c1}m${str.replace(/\x1B\[39m/g, `\x1B[${c1}m`)}\x1B[39m`),
  (c1: number, c2: number) => (isBrowser ? Identity : (str: string) => `\x1B[${c1}m${str}\x1B[${c2}m`)
);
|
||||
export function Map_get<K extends object, V>(map: WeakMap<K, V>, key: K, init: (key: K) => V): V;
|
||||
export function Map_get<K, V>(map: Map<K, V>, key: K, init: (key: K) => V): V;
|
||||
export function Map_get<K, V>(map: anymap<K, V>, key: K, init: (key: K) => V): V {
|
||||
if (!map.has(key)) map.set(key, init(key));
|
||||
return map.get(key)!;
|
||||
}
|
||||
export function isEmpty(array: ArrayLike<any>): boolean {
|
||||
__DEV__: assert(isArrayLike(array));
|
||||
return 0 === array.length;
|
||||
}
|
||||
/**
 * Removes exactly one occurrence of `target` from `array` (in place).
 * Callers may pass a precomputed `index`; dev builds verify it matches the
 * unique position of `target`. Uses `arguments.length` to tell whether
 * `index` was defaulted or caller-supplied.
 */
export function Array_splice<T extends any[]>(array: T, target: T[number], index: number = array.indexOf(target)) {
  __DEV__: {
    const i = arguments.length === 2 ? array.indexOf(target) : index;
    assert(i === index && i !== -1 && i === array.lastIndexOf(target), "", { array, target, index, i });
  }
  array.splice(index, 1);
}
|
||||
export function Array_replace<T extends any[]>(array: T, target: T[number], ...replacements: T[number][]) {
|
||||
const i = array.indexOf(target);
|
||||
__DEV__: if (i === -1 || i !== array.lastIndexOf(target))
|
||||
exit("Array_replace", { index: i, lastIndex: array.lastIndexOf(target), array, target, replacements });
|
||||
array.splice(array.indexOf(target), 1, ...replacements);
|
||||
}
|
||||
export function has_key_defined<T extends object, K extends T extends never ? never : keyof T>(
|
||||
o: T,
|
||||
k: K
|
||||
): o is K extends never
|
||||
? never
|
||||
: T extends { [k in K]: any }
|
||||
? T & { [k in K]: {} }
|
||||
: T extends { [k in K]?: any }
|
||||
? T & { [k in K]: {} }
|
||||
: never {
|
||||
return k in o && undefined !== o[k];
|
||||
}
|
||||
|
||||
export function is_object(data: unknown): data is object | ({ [key: string]: unknown } | unknown[]) {
|
||||
return "object" === typeof data && null !== data;
|
||||
}
|
||||
|
||||
export function is_array(data: unknown): data is any[] {
|
||||
return Array.isArray(data);
|
||||
}
|
||||
|
||||
function ois_vobject(data: any) {
|
||||
__DEV__: assert(is_object(data));
|
||||
switch (data.constructor) {
|
||||
case Array:
|
||||
case Object:
|
||||
case Set:
|
||||
case Map:
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export function each<A extends vObject>(data: A, callback: itfn<A, void>): void;
|
||||
export function each(data: any, callback: (value: any, index: any) => void): void {
|
||||
__DEV__: assert(ois_vobject(data));
|
||||
// prettier-ignore
|
||||
switch (data.constructor) {
|
||||
case Array: { let i = 0; for (; i < data.length; i++) callback(data[i], i); return; }
|
||||
case Object: { let k; for (k in data) callback(data[k], k); return; }
|
||||
case Set: { let d; for (d of data) callback(d, undefined!); return; }
|
||||
case Map: { let e; for (e of data) callback(e[1], e[0]); return; }
|
||||
default: { let x; for (x of data) callback(x, undefined!); return; }
|
||||
}
|
||||
}
|
||||
|
||||
export function iLast(index: number, array: any[]) {
|
||||
return 1 + index === array.length;
|
||||
}
|
||||
|
||||
export function find_last<T>(arr: T[], test: itfn<T[], boolean>): T | undefined {
|
||||
for (var i = arr.length; --i !== -1; ) if (test(arr[i], i)) return arr[i];
|
||||
}
|
||||
|
||||
export function try_eval<T>(fn: () => T): T | undefined {
|
||||
try {
|
||||
return fn();
|
||||
} catch (e) {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
export function clamp(min: number, max: number, value: number) {
|
||||
return value > min ? (value < max ? value : max) : min;
|
||||
}
|
||||
|
||||
export type MaybeFlatten<T> = T extends ReadonlyArray<infer U> ? MaybeFlatten<Exclude<U, T>> : T;
|
||||
export type FlatArray<T> = MaybeFlatten<T>[];
|
||||
export function flat<T extends readonly any[]>(arr: T): FlatArray<T> {
|
||||
return (arr as any as [any]).flat(Infinity);
|
||||
}
|
||||
export function flatMap<T extends readonly any[], R>(arr: T, mapFn: (item: T[number], index: number, array: T) => R): FlatArray<R> {
|
||||
return flat(arr.map(mapFn as any));
|
||||
}
|
||||
|
||||
export function joinln(...arr: string[]): string {
|
||||
return arr.join("\n");
|
||||
}
|
||||
|
||||
export function join_lines(fn: () => Generator<string, void, void>): string {
|
||||
return [...fn()].join("\n");
|
||||
}
|
||||
|
||||
export function reduce_tagged_template<T>(args: [strings: TemplateStringsArray, ...values: T[]], map: (value: T) => string) {
|
||||
for (var str = "" + args[0][0], i = 1; i < args.length; i++) str += map(args[i] as T) + args[0][i];
|
||||
return str;
|
||||
}
|
||||
|
||||
export function map_tagged_template<T, R>(args: [strings: TemplateStringsArray, ...values: T[]], map: (value: T) => R) {
|
||||
const arr: (R | string)[] = [args[0][0]];
|
||||
for (var i = 1; i < args.length; i++) arr.push(map(args[i] as T), args[0][i]);
|
||||
return arr;
|
||||
}
|
||||
|
||||
export function spliceAll<T extends any[]>(array: T): [...T] {
|
||||
const r: [...T] = [...array];
|
||||
array.length = 0;
|
||||
return r;
|
||||
}
|
||||
|
||||
export function spread<R>(fn: () => Iterable<R>): R[] {
|
||||
return [...fn()];
|
||||
}
|
||||
@@ -1,141 +0,0 @@
|
||||
import { clamp, color, getTerminalWidth, normPath } from "./common";
|
||||
|
||||
// Normalized current working directory, or "" outside Node.
const cwd =
  typeof process === "object" && typeof process?.cwd === "function" ? /* @__PURE__ */ normPath(/* @__PURE__ */ process.cwd() ?? "") : "";
/** Normalizes a path and strips the leading cwd (plus separator) when present. */
function normPath_strip_cwd(filepath: string) {
  let normFilePath = normPath(filepath);
  return normFilePath.startsWith(cwd) ? normFilePath.slice(cwd.length + 1) : normFilePath;
}
|
||||
|
||||
// Produces a styler for a stack line part, given the callee/url and the item.
type StackStyleFn = (callee: string, item: StackItem) => (str: string) => string;
// A parsed stack: an array of StackItems carrying the error message and styling hooks.
interface Stack extends Array<StackItem> {
  message: string;
  style?: { callee?: StackStyleFn; url?: StackStyleFn } | undefined;
}
|
||||
|
||||
/**
 * One parsed V8 stack-trace line. The constructor destructure-assigns the
 * regex capture groups straight onto the readonly fields (groups:
 * 1=callee, 2=filepath, 3=line, 4=col, 5=other e.g. "native"), defaulting
 * each to "" when the line does not match.
 */
class StackLine {
  declare readonly raw: string;
  declare readonly callee: string;
  declare readonly filepath: string;
  declare readonly line: string;
  declare readonly col: string;
  declare readonly other: string;
  declare readonly url: string;
  constructor(raw: string) {
    ({
      1: this.callee = "",
      2: this.filepath = "",
      3: this.line = "",
      4: this.col = "",
      5: this.other = "",
    } = (this.raw = raw).match(/at (?:(.+?)\s+\()?(?:(.+?):([0-9]+)(?::([0-9]+))?|([^)]+))\)?/) ?? ["", "", "", "", "", ""]);
    // Human-facing location: cwd-relative "path:line:col", "<native>", or "".
    this.url = this.filepath //
      ? normPath_strip_cwd(this.filepath) + (this.line && this.col && `:${this.line}:${this.col}`)
      : this.other === "native"
      ? "<native>"
      : "";
  }
}
|
||||
|
||||
function getPrintWidth() {
|
||||
return clamp(0, getTerminalWidth(128), 200) - 4;
|
||||
}
|
||||
|
||||
/**
 * A StackLine that knows its position within the owning Stack, supporting
 * relative navigation and hide/filter operations used by `editStack` hooks.
 */
class StackItem extends StackLine {
  constructor(private readonly stack: Stack, readonly i: number, raw: string) {
    super(raw);
  }
  // Hidden items are collapsed into a "...filtered N lines" footer.
  hidden = false;
  hide() {
    this.hidden = true;
    return this;
  }
  // Hides this item and the following n-1 items (relative indices 0..n-1).
  hideNext(n: number) {
    for (let i = 0; i < n; i++) this.at(i)?.hide();
  }
  // Hides consecutive items starting here while `test` holds.
  hideWhileTrue(test: (line: StackItem) => boolean) {
    let line: StackItem | undefined = this;
    while (line && test(line)) line = line.hide().next();
  }
  // Item at a relative offset, or undefined when out of bounds.
  at(relIndex: number) {
    return this.i + relIndex >= this.stack.length || this.i + relIndex < 0 ? undefined : this.stack[this.i + relIndex];
  }
  next() {
    return this.at(+1);
  }
  // Renders "  at callee        url" with the stack's (or default) stylers.
  toString() {
    const url = this.url;
    const calleeColor = this.stack.style?.callee?.(this.callee, this) ?? color.cyan;
    const urlColor = this.stack.style?.url?.(url, this) ?? color.grey;
    return compose2Cols(" at " + calleeColor(this.callee), urlColor(url), getPrintWidth());
  }
}
|
||||
|
||||
// prettier-ignore
|
||||
function createStack(message: string, Error_stack: string, style: Stack["style"]): Stack {
|
||||
for (var STACK: Stack = [] as any, i = 0, stack = Error_stack.split("\n").slice(2); i < stack.length; i++) STACK[i] = new StackItem(STACK, i, stack[i]);
|
||||
return (STACK.message = message), (STACK.style = style), STACK;
|
||||
}
|
||||
|
||||
function composeStack(stack: Stack) {
|
||||
var hidden = 0;
|
||||
var str = stack.message;
|
||||
for (var item of stack) item.hidden ? ++hidden : (str += "\n" + item.toString());
|
||||
return str + (hidden > 0 ? "\n" + color.grey(compose2Cols("", `...filtered ${hidden} lines`, getPrintWidth())) : "");
|
||||
}
|
||||
|
||||
/**
 * Returns the caller's "path:line:col" url, `offset` extra frames up the
 * stack (clamped to the captured range). Uses V8's captureStackTrace to
 * exclude this function's own frame.
 */
export function get_caller_cmd(offset = 0) {
  const obj: { stack: string } = {} as any;
  Error.captureStackTrace(obj, get_caller_cmd);
  const lines = obj.stack.split("\n");
  // lines[0] is the message; frames start at index 1.
  return new StackLine(lines[1 + clamp(0, lines.length - 2, offset)]).url;
}
|
||||
|
||||
// The prepareStackTrace implementation in effect before we overrode it.
var Error_prepareStackTrace;
// Guards overrideDefaultError against double installation.
let replaced_default_prepareStackTrace = false;
// Delegates to the saved formatter, then normalizes file:/// URLs and backslashes.
function custom_prepareStackTrace(err, calls) {
  return (Error_prepareStackTrace?.(err, calls) ?? calls.join("\n"))?.replace(/file:\/\/\//g, "").replace(/\\\\?/g, "/") ?? calls;
}
|
||||
|
||||
export function overrideDefaultError(silent = false) {
|
||||
if (replaced_default_prepareStackTrace === (replaced_default_prepareStackTrace = true)) return;
|
||||
Error_prepareStackTrace = Error.prepareStackTrace ?? ((_, calls) => calls.join("\n"));
|
||||
Error.prepareStackTrace = custom_prepareStackTrace;
|
||||
if (!silent) console.log(color.grey(`[devtools] Replaced Error.prepareStackTrace at ${get_caller_cmd(1)}`));
|
||||
}
|
||||
|
||||
/**
 * Builds an Error whose stack is parsed, optionally edited (`editStack`),
 * styled, and re-rendered via composeStack. Temporarily swaps V8's
 * stackTraceLimit/prepareStackTrace and restores them before returning —
 * the save/restore ordering here is load-bearing.
 */
export function createCustomError({
  message = "Unknown Error",
  editStack = (stack: StackItem[]) => {},
  style = undefined as Stack["style"],
  stackTraceLimit = 20,
}): Error {
  const _stackTraceLimit = Error.stackTraceLimit;
  const _prepareStackTrace = Error.prepareStackTrace;
  // If our global override is active, fall back to the saved formatter so
  // the capture below is not double-processed.
  if (replaced_default_prepareStackTrace && _prepareStackTrace === custom_prepareStackTrace)
    Error.prepareStackTrace = Error_prepareStackTrace;

  Error.stackTraceLimit = stackTraceLimit;

  const _ctx: { stack: string } = {} as any;

  // Capture excluding createCustomError's own frame.
  Error.captureStackTrace(_ctx, createCustomError);

  const stack = createStack(message, _ctx.stack, style);
  // Lazily formats when `.stack` is first read below.
  Error.prepareStackTrace = function (err, calls) {
    editStack(stack);
    return composeStack(stack);
  };

  const err = new Error(message); // (get) to trigger prepareStackTrace, (set) to prevent treeshaking
  err.stack = err.stack;

  Error.stackTraceLimit = _stackTraceLimit;
  Error.prepareStackTrace = _prepareStackTrace;

  return err;
}
|
||||
|
||||
function compose2Cols(left: string, right: string, len = 64, min = 1) {
|
||||
return left + " ".repeat(clamp(min, len, len - (color.unstyledLength(left) + color.unstyledLength(right)))) + right;
|
||||
}
|
||||
@@ -1,119 +0,0 @@
|
||||
/**
|
||||
* Prettier Plugin for Rust formatting using rust_fmt WebAssembly
|
||||
*
|
||||
* This plugin provides support for formatting Rust files using the rust_fmt WASM implementation.
|
||||
*/
|
||||
import type { Plugin, Parser, Printer } from 'prettier';
|
||||
|
||||
// Import the rust_fmt WASM module
|
||||
import rustFmtInit, { format, type Config } from './rust_fmt_vite.js';
|
||||
|
||||
// Shared name for the parser and its AST format.
const parserName = 'rust';

// Language configuration (linguist metadata for Rust).
const languages = [
  {
    name: 'Rust',
    aliases: ['rust', 'rs'],
    parsers: [parserName],
    extensions: ['.rs', '.rs.in'],
    aceMode: 'rust',
    tmScope: 'source.rust',
    linguistLanguageId: 327,
    vscodeLanguageIds: ['rust']
  }
];
|
||||
|
||||
// Parser configuration
|
||||
const rustParser: Parser<string> = {
|
||||
astFormat: parserName,
|
||||
parse: (text: string) => text,
|
||||
locStart: () => 0,
|
||||
locEnd: (node: string) => node.length,
|
||||
};
|
||||
|
||||
// Initialize rust_fmt WASM module
|
||||
let initPromise: Promise<void> | null = null;
|
||||
let isInitialized = false;
|
||||
|
||||
function initRustFmt(): Promise<void> {
|
||||
if (initPromise) {
|
||||
return initPromise;
|
||||
}
|
||||
|
||||
initPromise = (async () => {
|
||||
if (!isInitialized) {
|
||||
await rustFmtInit();
|
||||
isInitialized = true;
|
||||
}
|
||||
})();
|
||||
|
||||
return initPromise;
|
||||
}
|
||||
|
||||
// Printer configuration
|
||||
const rustPrinter: Printer<string> = {
|
||||
print: (path, options) => {
|
||||
try {
|
||||
if (!isInitialized) {
|
||||
console.warn('rust_fmt WASM module not initialized, returning original text');
|
||||
return (path as any).getValue ? (path as any).getValue() : path.node;
|
||||
}
|
||||
|
||||
const text = (path as any).getValue ? (path as any).getValue() : path.node;
|
||||
const config = getRustFmtConfig(options);
|
||||
|
||||
// Format using rust_fmt (synchronous call)
|
||||
const formatted = format(text, config);
|
||||
|
||||
return formatted.trim();
|
||||
} catch (error) {
|
||||
console.warn('Rust formatting failed:', error);
|
||||
// Return original text if formatting fails
|
||||
return (path as any).getValue ? (path as any).getValue() : path.node;
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
// Helper function to create rust_fmt config from Prettier options
|
||||
function getRustFmtConfig(options: any): Config {
|
||||
const config: Config = {};
|
||||
|
||||
// Map Prettier options to rust_fmt config
|
||||
if (options.useTabs !== undefined) {
|
||||
config.use_tabs = options.useTabs;
|
||||
}
|
||||
|
||||
// Note: rust_fmt currently only supports use_tabs option
|
||||
// Future versions may support more options like tab_width
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
// Plugin options
const options = {
  // Currently rust_fmt only supports use_tabs option
  // The tab width and other formatting options are handled by prettyplease internally
};

// Plugin definition: wires the language, pass-through parser, WASM-backed
// printer, and (currently empty) options into one Prettier plugin.
const rustPlugin: Plugin = {
  languages,
  parsers: {
    [parserName]: rustParser,
  },
  printers: {
    [parserName]: rustPrinter,
  },
  options,
};
|
||||
|
||||
// Initialize the WASM module eagerly at import time.
// NOTE(review): this eager kickoff costs app-launch time; once the printer
// awaits initRustFmt() itself, this call can be dropped for lazy startup
// (the same change this commit applied to the clang-format plugin) — confirm.
initRustFmt().catch(error => {
  console.error('Failed to initialize rust_fmt WASM module:', error);
});

export default rustPlugin;
export { languages };
export const parsers = rustPlugin.parsers;
export const printers = rustPlugin.printers;
|
||||
@@ -1,109 +0,0 @@
|
||||
import { createScalaPrinter } from "./printer";
|
||||
import { parse, type ScalaCstNode, type IToken } from "./scala-parser";
|
||||
import { type Plugin, type SupportOption } from "prettier";
|
||||
|
||||
/**
 * Definition of the language supported by Prettier.
 */
const languages = [
  {
    name: "Scala",
    parsers: ["scala"],
    extensions: [".scala", ".sc"],
    vscodeLanguageIds: ["scala"],
  },
];
|
||||
|
||||
/**
 * Scala parser definition.
 */
const parsers = {
  scala: {
    parse: (text: string) => {
      const result = parse(text);

      // Simple comment preservation: store comments on the AST and let the visitor handle them.
      const ast = {
        ...result.cst,
        comments: [], // bypass Prettier's built-in comment validation
        originalComments: result.comments || [], // plugin-private comment storage
        type: "compilationUnit",
      };
      return ast;
    },
    astFormat: "scala-cst",
    locStart: (node: ScalaCstNode | IToken) => {
      // Handle comment tokens (from Chevrotain lexer)
      if ("startOffset" in node && node.startOffset !== undefined) {
        return node.startOffset;
      }
      // Handle CST nodes
      if ("location" in node && node.location?.startOffset !== undefined) {
        return node.location.startOffset;
      }
      return 0;
    },
    locEnd: (node: ScalaCstNode | IToken) => {
      // Handle comment tokens (from Chevrotain lexer)
      if ("endOffset" in node && node.endOffset !== undefined) {
        return node.endOffset + 1; // Chevrotain endOffset is inclusive, Prettier expects exclusive
      }
      // Handle CST nodes
      if ("location" in node && node.location?.endOffset !== undefined) {
        return node.location.endOffset + 1; // Chevrotain endOffset is inclusive, Prettier expects exclusive
      }
      return 1;
    },
    hasPragma: () => false,
  },
};
|
||||
|
||||
/**
 * Printer definition for the "scala-cst" AST format.
 */
const printers = {
  "scala-cst": createScalaPrinter(),
};
|
||||
|
||||
/**
 * Plugin options (scalafmt compatibility — phase 1).
 */
const options: Record<string, SupportOption> = {
  // Prettier standard options with Scala-specific defaults
  semi: {
    type: "boolean",
    default: false, // Scala convention: omit semicolons
    description: "Add semicolons at the end of statements",
    category: "Global",
  } as const,

  // Deprecated options (backward compatibility)
  scalaLineWidth: {
    type: "int",
    default: 80,
    description: "Maximum line width (DEPRECATED: use printWidth instead)",
    category: "Scala",
  } as const,
  scalaIndentStyle: {
    type: "choice",
    default: "spaces",
    choices: [
      { value: "spaces", description: "Use spaces for indentation" },
      { value: "tabs", description: "Use tabs for indentation" },
    ],
    description: "Indentation style (DEPRECATED: use useTabs instead)",
    category: "Scala",
  } as const,
};
|
||||
|
||||
/**
 * The assembled Prettier plugin export.
 */
const plugin: Plugin = {
  languages,
  parsers,
  printers,
  options,
};

export default plugin;
|
||||
@@ -1,91 +0,0 @@
|
||||
import { CstNodeVisitor, type CSTNode } from "./visitor";
|
||||
import type { ScalaCstNode, IToken } from "./scala-parser";
|
||||
import { type Doc, type Printer, type AstPath, type Options } from "prettier";
|
||||
|
||||
/**
|
||||
* Scala用のPrettierプリンターを作成
|
||||
* @returns Prettierプリンターオブジェクト
|
||||
*/
|
||||
export function createScalaPrinter(): Printer {
|
||||
return {
|
||||
/**
|
||||
* ASTノードをフォーマット済みのテキストに変換
|
||||
* @param path - 現在のノードへのパス
|
||||
* @param options - Prettierオプション
|
||||
* @param print - 子ノードを印刷するための関数
|
||||
* @returns フォーマット済みのDoc
|
||||
*/
|
||||
print(
|
||||
path: AstPath<ScalaCstNode>,
|
||||
options: Options,
|
||||
print: (path: AstPath) => Doc,
|
||||
): Doc {
|
||||
const node = path.getValue();
|
||||
|
||||
const visitor = new CstNodeVisitor();
|
||||
const result = visitor.visit(node, {
|
||||
path,
|
||||
options: {
|
||||
printWidth: options.printWidth,
|
||||
tabWidth: options.tabWidth,
|
||||
useTabs: options.useTabs,
|
||||
semi: options.semi,
|
||||
singleQuote: options.singleQuote,
|
||||
trailingComma:
|
||||
options.trailingComma === "es5" ? "all" : options.trailingComma,
|
||||
},
|
||||
print: (childNode: CSTNode) => {
|
||||
// 子ノード用のモックパスを作成
|
||||
const mockPath = {
|
||||
getValue: () => childNode,
|
||||
call: (fn: () => unknown) => fn(),
|
||||
};
|
||||
return String(print(mockPath as AstPath<unknown>));
|
||||
},
|
||||
indentLevel: 0,
|
||||
});
|
||||
|
||||
// 文字列結果をPrettierのDocに変換
|
||||
return result;
|
||||
},
|
||||
/**
|
||||
* コメントを印刷
|
||||
* @param path - コメントトークンへのパス
|
||||
* @returns フォーマット済みのコメント
|
||||
*/
|
||||
printComment(path: AstPath<IToken>): Doc {
|
||||
const comment = path.getValue();
|
||||
if (!comment) return "";
|
||||
|
||||
// Chevrotainのimageプロパティを使用
|
||||
if (typeof comment.image === "string") {
|
||||
return comment.image;
|
||||
}
|
||||
|
||||
// fallback
|
||||
if (typeof comment.image === "string") {
|
||||
return comment.image;
|
||||
}
|
||||
|
||||
// デバッグ: コメント構造を確認
|
||||
console.log("Unexpected comment structure in printComment:", comment);
|
||||
return "";
|
||||
},
|
||||
canAttachComment(): boolean {
|
||||
// コメント機能を一時的に無効化
|
||||
return false;
|
||||
},
|
||||
willPrintOwnComments(): boolean {
|
||||
return false; // Prettier標準のコメント処理を使用しない
|
||||
},
|
||||
insertPragma(text: string): string {
|
||||
return text;
|
||||
},
|
||||
hasPrettierIgnore(): boolean {
|
||||
return false;
|
||||
},
|
||||
isBlockComment(comment: IToken): boolean {
|
||||
return comment.tokenType?.name === "BlockComment";
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -1,206 +0,0 @@
|
||||
import { ScalaLexer } from "./lexer";
|
||||
import { parserInstance } from "./parser";
|
||||
import type {
|
||||
ParseResult,
|
||||
ScalaCstNode,
|
||||
TokenBounds,
|
||||
LineColumn,
|
||||
} from "./types";
|
||||
import type { IToken, CstElement } from "chevrotain";
|
||||
|
||||
export { ScalaLexer, allTokens } from "./lexer";
|
||||
export { ScalaParser, parserInstance } from "./parser";
|
||||
export type {
|
||||
ParseResult,
|
||||
ScalaCstNode,
|
||||
TokenBounds,
|
||||
LineColumn,
|
||||
} from "./types";
|
||||
export type { IToken } from "chevrotain";
|
||||
|
||||
/**
|
||||
* CSTノードに位置情報を自動設定するヘルパー関数
|
||||
* @param cst - 処理対象のCSTノード
|
||||
* @param tokens - 解析で使用されたトークンの配列
|
||||
* @param text - 元のソースコードテキスト
|
||||
* @returns 位置情報が付与されたCSTノード
|
||||
*/
|
||||
function addLocationToCST(
|
||||
cst: ScalaCstNode,
|
||||
tokens: IToken[],
|
||||
text: string,
|
||||
): ScalaCstNode {
|
||||
if (!cst || !tokens) return cst;
|
||||
|
||||
// テキストから行の開始位置を計算
|
||||
const lineStarts = [0]; // 最初の行は0から始まる
|
||||
for (let i = 0; i < text.length; i++) {
|
||||
if (text[i] === "\n") {
|
||||
lineStarts.push(i + 1);
|
||||
}
|
||||
}
|
||||
|
||||
// オフセットから行番号と列番号を取得
|
||||
function getLineAndColumn(offset: number): LineColumn {
|
||||
let line = 1;
|
||||
for (let i = 0; i < lineStarts.length - 1; i++) {
|
||||
if (offset >= lineStarts[i] && offset < lineStarts[i + 1]) {
|
||||
line = i + 1;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (offset >= lineStarts[lineStarts.length - 1]) {
|
||||
line = lineStarts.length;
|
||||
}
|
||||
|
||||
const column = offset - lineStarts[line - 1] + 1;
|
||||
return { line, column };
|
||||
}
|
||||
|
||||
// トークンから最小・最大位置を計算
|
||||
function findTokenBounds(node: ScalaCstNode): TokenBounds | null {
|
||||
if (!node) return null;
|
||||
|
||||
let minStart = Infinity;
|
||||
let maxEnd = -1;
|
||||
|
||||
function findTokensInNode(n: ScalaCstNode | IToken): void {
|
||||
if (!n) return;
|
||||
|
||||
// トークンの場合
|
||||
if (
|
||||
"startOffset" in n &&
|
||||
"endOffset" in n &&
|
||||
n.startOffset !== undefined &&
|
||||
n.endOffset !== undefined
|
||||
) {
|
||||
minStart = Math.min(minStart, n.startOffset);
|
||||
maxEnd = Math.max(maxEnd, n.endOffset);
|
||||
return;
|
||||
}
|
||||
|
||||
// CSTノードの場合
|
||||
if ("children" in n && n.children) {
|
||||
for (const children of Object.values(n.children)) {
|
||||
if (Array.isArray(children)) {
|
||||
children.forEach((child) => {
|
||||
// CstElementをScalaCstNode | ITokenに安全に変換
|
||||
if ("children" in child) {
|
||||
findTokensInNode(child as ScalaCstNode);
|
||||
} else {
|
||||
findTokensInNode(child as IToken);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
findTokensInNode(node);
|
||||
|
||||
if (minStart === Infinity || maxEnd === -1) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return { start: minStart, end: maxEnd };
|
||||
}
|
||||
|
||||
// 再帰的にCSTノードに位置情報を設定
|
||||
function setCSTLocation(node: ScalaCstNode): ScalaCstNode {
|
||||
if (!node) return node;
|
||||
|
||||
// トークンの場合はそのまま返す
|
||||
if (node.startOffset !== undefined) {
|
||||
return node;
|
||||
}
|
||||
|
||||
// CSTノードの場合
|
||||
if (node.children) {
|
||||
// 子ノードを先に処理
|
||||
const processedChildren: Record<string, CstElement[]> = {};
|
||||
for (const [key, children] of Object.entries(node.children)) {
|
||||
if (Array.isArray(children)) {
|
||||
processedChildren[key] = children.map((child) => {
|
||||
if ("children" in child) {
|
||||
return setCSTLocation(child as ScalaCstNode);
|
||||
}
|
||||
return child; // IToken
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// このノードの位置を計算
|
||||
const bounds = findTokenBounds({ ...node, children: processedChildren });
|
||||
|
||||
if (bounds) {
|
||||
const startLoc = getLineAndColumn(bounds.start);
|
||||
const endLoc = getLineAndColumn(bounds.end);
|
||||
|
||||
return {
|
||||
...node,
|
||||
children: processedChildren,
|
||||
location: {
|
||||
startOffset: bounds.start,
|
||||
endOffset: bounds.end,
|
||||
startLine: startLoc.line,
|
||||
endLine: endLoc.line,
|
||||
startColumn: startLoc.column,
|
||||
endColumn: endLoc.column,
|
||||
},
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
...node,
|
||||
children: processedChildren,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return node;
|
||||
}
|
||||
|
||||
return setCSTLocation(cst);
|
||||
}
|
||||
|
||||
export function parse(text: string): ParseResult {
|
||||
// Use legacy parser for now until modular parser is fixed
|
||||
return parseLegacy(text);
|
||||
}
|
||||
|
||||
// Legacy parser function (has left recursion issues)
|
||||
export function parseLegacy(text: string): ParseResult {
|
||||
// Tokenize
|
||||
const lexResult = ScalaLexer.tokenize(text);
|
||||
|
||||
if (lexResult.errors.length > 0) {
|
||||
throw new Error(
|
||||
`Lexing errors: ${lexResult.errors.map((e) => e.message).join(", ")}`,
|
||||
);
|
||||
}
|
||||
|
||||
// Parse
|
||||
parserInstance.input = lexResult.tokens;
|
||||
const cst = parserInstance.compilationUnit();
|
||||
|
||||
if (parserInstance.errors.length > 0) {
|
||||
throw new Error(
|
||||
`Parsing errors: ${parserInstance.errors.map((e) => e.message).join(", ")}`,
|
||||
);
|
||||
}
|
||||
|
||||
// CSTに位置情報を追加
|
||||
const cstWithLocation = addLocationToCST(
|
||||
cst as ScalaCstNode,
|
||||
lexResult.tokens,
|
||||
text,
|
||||
);
|
||||
|
||||
return {
|
||||
cst: cstWithLocation,
|
||||
errors: [],
|
||||
comments: lexResult.groups.comments || [],
|
||||
};
|
||||
}
|
||||
|
||||
// Note: parseModular function was removed as the modular parser integration
|
||||
// is still in development. Use the main parse() function instead.
|
||||
@@ -1,479 +0,0 @@
|
||||
import { createToken, Lexer, ILexingResult } from "chevrotain";
|
||||
|
||||
// Keywords
|
||||
export const Val = createToken({ name: "Val", pattern: /val\b/ });
|
||||
export const Var = createToken({ name: "Var", pattern: /var\b/ });
|
||||
export const Def = createToken({ name: "Def", pattern: /def\b/ });
|
||||
export const Class = createToken({ name: "Class", pattern: /class\b/ });
|
||||
export const ObjectKeyword = createToken({
|
||||
name: "Object",
|
||||
pattern: /object\b/,
|
||||
});
|
||||
export const Trait = createToken({ name: "Trait", pattern: /trait\b/ });
|
||||
export const Extends = createToken({ name: "Extends", pattern: /extends\b/ });
|
||||
export const With = createToken({ name: "With", pattern: /with\b/ });
|
||||
export const If = createToken({ name: "If", pattern: /if\b/ });
|
||||
export const Else = createToken({ name: "Else", pattern: /else\b/ });
|
||||
export const While = createToken({ name: "While", pattern: /while\b/ });
|
||||
export const For = createToken({ name: "For", pattern: /for\b/ });
|
||||
export const Yield = createToken({ name: "Yield", pattern: /yield\b/ });
|
||||
export const Return = createToken({ name: "Return", pattern: /return\b/ });
|
||||
export const New = createToken({ name: "New", pattern: /new\b/ });
|
||||
export const This = createToken({ name: "This", pattern: /this\b/ });
|
||||
export const Super = createToken({ name: "Super", pattern: /super\b/ });
|
||||
export const Package = createToken({ name: "Package", pattern: /package\b/ });
|
||||
export const Import = createToken({ name: "Import", pattern: /import\b/ });
|
||||
export const Case = createToken({ name: "Case", pattern: /case\b/ });
|
||||
export const Match = createToken({ name: "Match", pattern: /match\b/ });
|
||||
export const Try = createToken({ name: "Try", pattern: /try\b/ });
|
||||
export const Catch = createToken({ name: "Catch", pattern: /catch\b/ });
|
||||
export const Finally = createToken({ name: "Finally", pattern: /finally\b/ });
|
||||
export const Throw = createToken({ name: "Throw", pattern: /throw\b/ });
|
||||
export const Null = createToken({ name: "Null", pattern: /null\b/ });
|
||||
export const True = createToken({ name: "True", pattern: /true\b/ });
|
||||
export const False = createToken({ name: "False", pattern: /false\b/ });
|
||||
export const NotImplemented = createToken({
|
||||
name: "NotImplemented",
|
||||
pattern: /\?\?\?/,
|
||||
});
|
||||
export const Type = createToken({ name: "Type", pattern: /type\b/ });
|
||||
export const Private = createToken({ name: "Private", pattern: /private\b/ });
|
||||
export const Protected = createToken({
|
||||
name: "Protected",
|
||||
pattern: /protected\b/,
|
||||
});
|
||||
export const Public = createToken({ name: "Public", pattern: /public\b/ });
|
||||
export const Abstract = createToken({
|
||||
name: "Abstract",
|
||||
pattern: /abstract\b/,
|
||||
});
|
||||
export const Final = createToken({ name: "Final", pattern: /final\b/ });
|
||||
export const Sealed = createToken({ name: "Sealed", pattern: /sealed\b/ });
|
||||
export const Implicit = createToken({
|
||||
name: "Implicit",
|
||||
pattern: /implicit\b/,
|
||||
});
|
||||
export const Lazy = createToken({ name: "Lazy", pattern: /lazy\b/ });
|
||||
export const Override = createToken({
|
||||
name: "Override",
|
||||
pattern: /override\b/,
|
||||
});
|
||||
export const Given = createToken({ name: "Given", pattern: /given\b/ });
|
||||
export const Using = createToken({ name: "Using", pattern: /using\b/ });
|
||||
export const To = createToken({ name: "To", pattern: /to\b/ });
|
||||
export const Enum = createToken({ name: "Enum", pattern: /enum\b/ });
|
||||
export const Array = createToken({ name: "Array", pattern: /Array\b/ });
|
||||
export const Extension = createToken({
|
||||
name: "Extension",
|
||||
pattern: /extension\b/,
|
||||
});
|
||||
export const Export = createToken({ name: "Export", pattern: /export\b/ });
|
||||
export const Opaque = createToken({ name: "Opaque", pattern: /opaque\b/ });
|
||||
export const Inline = createToken({ name: "Inline", pattern: /inline\b/ });
|
||||
export const Transparent = createToken({
|
||||
name: "Transparent",
|
||||
pattern: /transparent\b/,
|
||||
});
|
||||
|
||||
// Identifiers (must come after keywords)
|
||||
// Enhanced Unicode identifier support following Scala Language Specification
|
||||
// Operator identifier for custom operators (e.g., +++, <~>, etc.)
|
||||
export const OperatorIdentifier = createToken({
|
||||
name: "OperatorIdentifier",
|
||||
pattern: /[+\-*/%:&|^<>=!~?#@$\\]+/,
|
||||
});
|
||||
|
||||
// Backward compatible with existing implementation, enhanced mathematical symbol support
|
||||
// Supports: Latin, Greek, Cyrillic, CJK, Arabic, Hebrew, Mathematical symbols, Emojis (via surrogate pairs)
|
||||
export const Identifier = createToken({
|
||||
name: "Identifier",
|
||||
pattern:
|
||||
/(?:_[a-zA-Z0-9_$\u00C0-\u00FF\u0370-\u03FF\u0400-\u04FF\u3040-\u309F\u30A0-\u30FF\u4E00-\u9FFF\u0590-\u05FF\u0600-\u06FF\u2200-\u22FF\u27C0-\u27EF\u2980-\u29FF\u2A00-\u2AFF]+|[a-zA-Z$\u00C0-\u00FF\u0370-\u03FF\u0400-\u04FF\u3040-\u309F\u30A0-\u30FF\u4E00-\u9FFF\u0590-\u05FF\u0600-\u06FF\u2200-\u22FF\u27C0-\u27EF\u2980-\u29FF\u2A00-\u2AFF][a-zA-Z0-9_$\u00C0-\u00FF\u0370-\u03FF\u0400-\u04FF\u3040-\u309F\u30A0-\u30FF\u4E00-\u9FFF\u0590-\u05FF\u0600-\u06FF\u2200-\u22FF\u27C0-\u27EF\u2980-\u29FF\u2A00-\u2AFF]*)/u,
|
||||
});
|
||||
|
||||
// Literals
|
||||
export const IntegerLiteral = createToken({
|
||||
name: "IntegerLiteral",
|
||||
pattern: /-?\d+[lLiIsSbB]?/,
|
||||
});
|
||||
|
||||
// Scientific notation literal (must come before FloatingPointLiteral)
|
||||
export const ScientificNotationLiteral = createToken({
|
||||
name: "ScientificNotationLiteral",
|
||||
pattern: /-?\d+(\.\d+)?[eE][+-]?\d+[fFdD]?/,
|
||||
});
|
||||
|
||||
export const FloatingPointLiteral = createToken({
|
||||
name: "FloatingPointLiteral",
|
||||
pattern: /-?\d+\.\d+[fFdD]?|-?\.\d+[fFdD]?/,
|
||||
});
|
||||
|
||||
export const StringLiteral = createToken({
|
||||
name: "StringLiteral",
|
||||
pattern: /"""[\s\S]*?"""|"([^"\\]|\\.|\\u[0-9A-Fa-f]{4})*"/,
|
||||
});
|
||||
|
||||
export const InterpolatedStringLiteral = createToken({
|
||||
name: "InterpolatedStringLiteral",
|
||||
pattern:
|
||||
/[a-zA-Z_][a-zA-Z0-9_]*"""[\s\S]*?"""|[a-zA-Z_][a-zA-Z0-9_]*"([^"\\]|\\.|\\u[0-9A-Fa-f]{4}|\$[a-zA-Z_][a-zA-Z0-9_]*|\$\{[^}]*\})*"/,
|
||||
});
|
||||
|
||||
export const CharLiteral = createToken({
|
||||
name: "CharLiteral",
|
||||
pattern: /'([^'\\]|\\.|\\u[0-9A-Fa-f]{4})'/,
|
||||
});
|
||||
|
||||
// Operators
|
||||
export const Equals = createToken({ name: "Equals", pattern: /=/ });
|
||||
export const Plus = createToken({ name: "Plus", pattern: /\+/ });
|
||||
export const Minus = createToken({ name: "Minus", pattern: /-/ });
|
||||
export const Star = createToken({ name: "Star", pattern: /\*/ });
|
||||
export const Slash = createToken({ name: "Slash", pattern: /\// });
|
||||
export const Backslash = createToken({ name: "Backslash", pattern: /\\/ });
|
||||
export const Percent = createToken({ name: "Percent", pattern: /%/ });
|
||||
export const LessThan = createToken({ name: "LessThan", pattern: /</ });
|
||||
export const GreaterThan = createToken({ name: "GreaterThan", pattern: />/ });
|
||||
export const LessThanEquals = createToken({
|
||||
name: "LessThanEquals",
|
||||
pattern: /<=/,
|
||||
});
|
||||
export const GreaterThanEquals = createToken({
|
||||
name: "GreaterThanEquals",
|
||||
pattern: />=/,
|
||||
});
|
||||
export const EqualsEquals = createToken({
|
||||
name: "EqualsEquals",
|
||||
pattern: /==/,
|
||||
});
|
||||
export const DoubleEquals = EqualsEquals; // Alias for modular parser compatibility
|
||||
export const NotEquals = createToken({ name: "NotEquals", pattern: /!=/ });
|
||||
export const LogicalAnd = createToken({ name: "LogicalAnd", pattern: /&&/ });
|
||||
export const LogicalOr = createToken({ name: "LogicalOr", pattern: /\|\|/ });
|
||||
export const Exclamation = createToken({ name: "Exclamation", pattern: /!/ });
|
||||
export const Arrow = createToken({ name: "Arrow", pattern: /=>/ });
|
||||
export const TypeLambdaArrow = createToken({
|
||||
name: "TypeLambdaArrow",
|
||||
pattern: /=>>/,
|
||||
});
|
||||
export const DoubleArrow = TypeLambdaArrow; // Alias for modular parser compatibility
|
||||
export const LeftArrow = createToken({ name: "LeftArrow", pattern: /<-/ });
|
||||
export const RightArrow = createToken({ name: "RightArrow", pattern: /->/ });
|
||||
export const ContextArrow = createToken({
|
||||
name: "ContextArrow",
|
||||
pattern: /\?=>/,
|
||||
});
|
||||
export const SubtypeOf = createToken({ name: "SubtypeOf", pattern: /<:/ });
|
||||
export const ColonLess = SubtypeOf; // Alias for modular parser compatibility
|
||||
export const SupertypeOf = createToken({ name: "SupertypeOf", pattern: />:/ });
|
||||
export const GreaterColon = SupertypeOf; // Alias for modular parser compatibility
|
||||
export const AppendOp = createToken({ name: "AppendOp", pattern: /:\+/ });
|
||||
export const PlusColon = AppendOp; // Alias for modular parser compatibility
|
||||
export const ColonPlus = createToken({ name: "ColonPlus", pattern: /:\+/ }); // Same as AppendOp but separate token for parser
|
||||
export const PrependOp = createToken({ name: "PrependOp", pattern: /::/ });
|
||||
export const ColonColon = PrependOp; // Alias for modular parser compatibility
|
||||
export const ConcatOp = createToken({ name: "ConcatOp", pattern: /\+\+/ });
|
||||
export const DoublePlus = ConcatOp; // Alias for modular parser compatibility
|
||||
export const AppendEquals = createToken({
|
||||
name: "AppendEquals",
|
||||
pattern: /\+\+=/,
|
||||
});
|
||||
// Compound assignment operators
|
||||
export const PlusEquals = createToken({ name: "PlusEquals", pattern: /\+=/ });
|
||||
export const MinusEquals = createToken({ name: "MinusEquals", pattern: /-=/ });
|
||||
export const StarEquals = createToken({ name: "StarEquals", pattern: /\*=/ });
|
||||
export const SlashEquals = createToken({ name: "SlashEquals", pattern: /\/=/ });
|
||||
export const PercentEquals = createToken({
|
||||
name: "PercentEquals",
|
||||
pattern: /%=/,
|
||||
});
|
||||
// sbt DSL operators
|
||||
export const DoublePercent = createToken({
|
||||
name: "DoublePercent",
|
||||
pattern: /%%/,
|
||||
});
|
||||
// Bitwise operators
|
||||
export const BitwiseAnd = createToken({ name: "BitwiseAnd", pattern: /&/ });
|
||||
export const BitwiseOr = createToken({ name: "BitwiseOr", pattern: /\|/ });
|
||||
export const BitwiseXor = createToken({ name: "BitwiseXor", pattern: /\^/ });
|
||||
export const BitwiseTilde = createToken({ name: "BitwiseTilde", pattern: /~/ });
|
||||
export const LeftShift = createToken({ name: "LeftShift", pattern: /<</ });
|
||||
export const RightShift = createToken({ name: "RightShift", pattern: />>/ });
|
||||
export const UnsignedRightShift = createToken({
|
||||
name: "UnsignedRightShift",
|
||||
pattern: />>>/,
|
||||
});
|
||||
export const Colon = createToken({ name: "Colon", pattern: /:/ });
|
||||
export const ColonEquals = createToken({ name: "ColonEquals", pattern: /:=/ });
|
||||
export const SbtAssign = ColonEquals; // Alias for sbt compatibility
|
||||
export const Semicolon = createToken({ name: "Semicolon", pattern: /;/ });
|
||||
export const Comma = createToken({ name: "Comma", pattern: /,/ });
|
||||
export const Dot = createToken({ name: "Dot", pattern: /\./ });
|
||||
export const Underscore = createToken({
|
||||
name: "Underscore",
|
||||
pattern: /_/,
|
||||
});
|
||||
export const At = createToken({ name: "At", pattern: /@/ });
|
||||
export const Question = createToken({ name: "Question", pattern: /\?/ });
|
||||
|
||||
// Quote and Splice tokens for Scala 3 macros
|
||||
export const QuoteStart = createToken({ name: "QuoteStart", pattern: /'\{/ });
|
||||
export const SpliceStart = createToken({
|
||||
name: "SpliceStart",
|
||||
pattern: /\$\{/,
|
||||
});
|
||||
|
||||
// Additional tokens for modular parser
|
||||
export const Quote = createToken({ name: "Quote", pattern: /'/ });
|
||||
export const Dollar = createToken({ name: "Dollar", pattern: /\$/ });
|
||||
// QuestionArrow is now alias for ContextArrow to avoid duplicate patterns
|
||||
export const QuestionArrow = ContextArrow;
|
||||
|
||||
// String interpolation tokens
|
||||
export const InterpolatedString = createToken({
|
||||
name: "InterpolatedString",
|
||||
pattern: /s"([^"\\]|\\.|\\u[0-9A-Fa-f]{4})*"/,
|
||||
});
|
||||
export const FormattedString = createToken({
|
||||
name: "FormattedString",
|
||||
pattern: /f"([^"\\]|\\.|\\u[0-9A-Fa-f]{4})*"/,
|
||||
});
|
||||
export const RawString = createToken({
|
||||
name: "RawString",
|
||||
pattern: /raw"([^"\\]|\\.|\\u[0-9A-Fa-f]{4})*"/,
|
||||
});
|
||||
export const CustomInterpolatedString = createToken({
|
||||
name: "CustomInterpolatedString",
|
||||
pattern: /[a-zA-Z_][a-zA-Z0-9_]*"([^"\\]|\\.|\\u[0-9A-Fa-f]{4})*"/,
|
||||
});
|
||||
|
||||
// Numeric suffix tokens
|
||||
export const LongSuffix = createToken({ name: "LongSuffix", pattern: /[lL]/ });
|
||||
export const IntSuffix = createToken({ name: "IntSuffix", pattern: /[iI]/ });
|
||||
export const ShortSuffix = createToken({
|
||||
name: "ShortSuffix",
|
||||
pattern: /[sS]/,
|
||||
});
|
||||
export const ByteSuffix = createToken({ name: "ByteSuffix", pattern: /[bB]/ });
|
||||
export const FloatSuffix = createToken({
|
||||
name: "FloatSuffix",
|
||||
pattern: /[fF]/,
|
||||
});
|
||||
export const DoubleSuffix = createToken({
|
||||
name: "DoubleSuffix",
|
||||
pattern: /[dD]/,
|
||||
});
|
||||
|
||||
// Additional missing tokens
|
||||
export const Hash = createToken({ name: "Hash", pattern: /#/ });
|
||||
|
||||
// Delimiters
|
||||
export const LeftParen = createToken({ name: "LeftParen", pattern: /\(/ });
|
||||
export const RightParen = createToken({ name: "RightParen", pattern: /\)/ });
|
||||
export const LeftBracket = createToken({ name: "LeftBracket", pattern: /\[/ });
|
||||
export const RightBracket = createToken({
|
||||
name: "RightBracket",
|
||||
pattern: /\]/,
|
||||
});
|
||||
export const LeftBrace = createToken({ name: "LeftBrace", pattern: /\{/ });
|
||||
export const RightBrace = createToken({ name: "RightBrace", pattern: /\}/ });
|
||||
|
||||
// Whitespace and Comments
|
||||
export const WhiteSpace = createToken({
|
||||
name: "WhiteSpace",
|
||||
pattern: /\s+/,
|
||||
group: Lexer.SKIPPED,
|
||||
});
|
||||
|
||||
export const LineComment = createToken({
|
||||
name: "LineComment",
|
||||
pattern: /\/\/[^\n\r]*/,
|
||||
group: "comments",
|
||||
});
|
||||
|
||||
export const BlockComment = createToken({
|
||||
name: "BlockComment",
|
||||
pattern: /\/\*([^*]|\*(?!\/))*\*\//,
|
||||
group: "comments",
|
||||
});
|
||||
|
||||
// All tokens in order
|
||||
export const allTokens = [
|
||||
// Comments (must come before operators)
|
||||
LineComment,
|
||||
BlockComment,
|
||||
|
||||
// Whitespace
|
||||
WhiteSpace,
|
||||
|
||||
// Keywords (must come before Identifier)
|
||||
Val,
|
||||
Var,
|
||||
Def,
|
||||
Class,
|
||||
ObjectKeyword,
|
||||
Trait,
|
||||
Extends,
|
||||
With,
|
||||
If,
|
||||
Else,
|
||||
While,
|
||||
For,
|
||||
Yield,
|
||||
Return,
|
||||
New,
|
||||
This,
|
||||
Super,
|
||||
Package,
|
||||
Import,
|
||||
Case,
|
||||
Match,
|
||||
Try,
|
||||
Catch,
|
||||
Finally,
|
||||
Throw,
|
||||
Null,
|
||||
True,
|
||||
False,
|
||||
NotImplemented,
|
||||
Type,
|
||||
Private,
|
||||
Protected,
|
||||
Public,
|
||||
Abstract,
|
||||
Final,
|
||||
Sealed,
|
||||
Implicit,
|
||||
Lazy,
|
||||
Override,
|
||||
Given,
|
||||
Using,
|
||||
To,
|
||||
Enum,
|
||||
Array,
|
||||
Extension,
|
||||
Export,
|
||||
Opaque,
|
||||
Inline,
|
||||
Transparent,
|
||||
|
||||
// Literals
|
||||
ScientificNotationLiteral, // Must come before FloatingPointLiteral
|
||||
FloatingPointLiteral, // Must come before IntegerLiteral
|
||||
IntegerLiteral,
|
||||
// String interpolation literals (must come before StringLiteral)
|
||||
CustomInterpolatedString,
|
||||
InterpolatedString,
|
||||
FormattedString,
|
||||
RawString,
|
||||
InterpolatedStringLiteral, // Must come before StringLiteral
|
||||
StringLiteral,
|
||||
CharLiteral,
|
||||
|
||||
// Multi-character operators (must come before single-character)
|
||||
TypeLambdaArrow, // Must come before Arrow to avoid ambiguity
|
||||
ContextArrow, // Must come before Arrow to avoid ambiguity
|
||||
Arrow,
|
||||
LeftArrow,
|
||||
RightArrow,
|
||||
SubtypeOf,
|
||||
SupertypeOf,
|
||||
LessThanEquals,
|
||||
GreaterThanEquals,
|
||||
EqualsEquals,
|
||||
NotEquals,
|
||||
LogicalAnd,
|
||||
LogicalOr,
|
||||
ColonEquals, // := must come before :
|
||||
AppendOp,
|
||||
PrependOp,
|
||||
AppendEquals, // ++= must come before ++
|
||||
ConcatOp,
|
||||
// Quote and splice tokens (must come before single-character)
|
||||
QuoteStart, // '{ must come before single '
|
||||
SpliceStart, // ${ must come before single $
|
||||
// Compound assignment operators
|
||||
PlusEquals,
|
||||
MinusEquals,
|
||||
StarEquals,
|
||||
SlashEquals,
|
||||
PercentEquals,
|
||||
// Bitwise shift operators (must come before single-character)
|
||||
UnsignedRightShift, // >>> must come before >>
|
||||
LeftShift,
|
||||
RightShift,
|
||||
|
||||
// Single-character operators
|
||||
Equals,
|
||||
Plus,
|
||||
Minus,
|
||||
Star,
|
||||
Slash,
|
||||
Backslash,
|
||||
DoublePercent, // %% must come before single %
|
||||
Percent,
|
||||
LessThan,
|
||||
GreaterThan,
|
||||
Exclamation,
|
||||
BitwiseAnd,
|
||||
BitwiseOr,
|
||||
BitwiseXor,
|
||||
BitwiseTilde,
|
||||
Colon,
|
||||
Semicolon,
|
||||
Comma,
|
||||
Dot,
|
||||
At,
|
||||
// QuestionArrow removed - now an alias for ContextArrow
|
||||
Question,
|
||||
Quote,
|
||||
Dollar,
|
||||
Hash,
|
||||
|
||||
// Delimiters
|
||||
LeftParen,
|
||||
RightParen,
|
||||
LeftBracket,
|
||||
RightBracket,
|
||||
LeftBrace,
|
||||
RightBrace,
|
||||
|
||||
// Operator identifier (before regular identifier)
|
||||
OperatorIdentifier,
|
||||
|
||||
// Identifier (must come before underscore)
|
||||
Identifier,
|
||||
|
||||
// Underscore (must come after identifier to not interfere with _identifier patterns)
|
||||
Underscore,
|
||||
];
|
||||
|
||||
// レキサーの作成(インポート時の問題を回避するための遅延初期化)
|
||||
let scalaLexerInstance: Lexer | null = null;
|
||||
|
||||
/**
|
||||
* Scalaコードの字句解析を行うレキサー
|
||||
*/
|
||||
export const ScalaLexer = {
|
||||
/**
|
||||
* レキサーインスタンスを取得(遅延初期化)
|
||||
* @returns Chevrotainレキサーのインスタンス
|
||||
*/
|
||||
get instance(): Lexer {
|
||||
if (!scalaLexerInstance) {
|
||||
scalaLexerInstance = new Lexer(allTokens);
|
||||
}
|
||||
return scalaLexerInstance;
|
||||
},
|
||||
/**
|
||||
* 入力文字列をトークン化
|
||||
* @param input - 字句解析対象のScalaソースコード
|
||||
* @returns トークン化の結果(トークン、エラー、グループ化されたトークン)
|
||||
*/
|
||||
tokenize(input: string): ILexingResult {
|
||||
return this.instance.tokenize(input);
|
||||
},
|
||||
};
|
||||
|
||||
// Export lexer instance for backward compatibility with tests
|
||||
export const lexerInstance = ScalaLexer;
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,58 +0,0 @@
|
||||
/**
|
||||
* Base parser module with shared utilities and interfaces
|
||||
*/
|
||||
import * as tokens from "../lexer";
|
||||
import { CstParser, ParserMethod, CstNode } from "chevrotain";
|
||||
import type { TokenType } from "chevrotain";
|
||||
|
||||
export interface ParserRuleMixin {
|
||||
// Utility methods for parser rules - these need to match CstParser access levels
|
||||
RULE: CstParser["RULE"];
|
||||
SUBRULE: CstParser["SUBRULE"];
|
||||
CONSUME: CstParser["CONSUME"];
|
||||
MANY: CstParser["MANY"];
|
||||
MANY_SEP: CstParser["MANY_SEP"];
|
||||
OPTION: CstParser["OPTION"];
|
||||
OR: CstParser["OR"];
|
||||
AT_LEAST_ONE: CstParser["AT_LEAST_ONE"];
|
||||
AT_LEAST_ONE_SEP: CstParser["AT_LEAST_ONE_SEP"];
|
||||
LA: CstParser["LA"];
|
||||
performSelfAnalysis: CstParser["performSelfAnalysis"];
|
||||
}
|
||||
|
||||
export abstract class BaseParserModule {
|
||||
protected parser: ParserRuleMixin;
|
||||
|
||||
constructor(parser: ParserRuleMixin) {
|
||||
this.parser = parser;
|
||||
}
|
||||
|
||||
// Helper methods for common patterns
|
||||
protected consumeTokenType(tokenType: TokenType) {
|
||||
return this.parser.CONSUME(tokenType);
|
||||
}
|
||||
|
||||
protected optionalConsume(tokenType: TokenType) {
|
||||
return this.parser.OPTION(() => this.parser.CONSUME(tokenType));
|
||||
}
|
||||
|
||||
protected manyOf(rule: () => void) {
|
||||
return this.parser.MANY(rule);
|
||||
}
|
||||
|
||||
protected oneOf(
|
||||
alternatives: Array<{ ALT: () => void; GATE?: () => boolean }>,
|
||||
) {
|
||||
return this.parser.OR(alternatives);
|
||||
}
|
||||
|
||||
protected subrule(rule: ParserMethod<unknown[], CstNode>) {
|
||||
return this.parser.SUBRULE(rule);
|
||||
}
|
||||
|
||||
protected lookahead(offset: number) {
|
||||
return this.parser.LA(offset);
|
||||
}
|
||||
}
|
||||
|
||||
export { tokens };
|
||||
@@ -1,179 +0,0 @@
|
||||
/**
|
||||
* Definition parsing module for class, object, trait, method, and variable definitions
|
||||
*/
|
||||
import { BaseParserModule, tokens } from "./base";
|
||||
import type { ParserMethod, CstNode } from "chevrotain";
|
||||
|
||||
export class DefinitionParserMixin extends BaseParserModule {
|
||||
// Dependencies from other modules
|
||||
annotation!: ParserMethod<unknown[], CstNode>;
|
||||
modifier!: ParserMethod<unknown[], CstNode>;
|
||||
typeParameters!: ParserMethod<unknown[], CstNode>;
|
||||
classParameters!: ParserMethod<unknown[], CstNode>;
|
||||
extendsClause!: ParserMethod<unknown[], CstNode>;
|
||||
classBody!: ParserMethod<unknown[], CstNode>;
|
||||
type!: ParserMethod<unknown[], CstNode>;
|
||||
expression!: ParserMethod<unknown[], CstNode>;
|
||||
pattern!: ParserMethod<unknown[], CstNode>;
|
||||
parameterLists!: ParserMethod<unknown[], CstNode>;
|
||||
|
||||
// Class definition
|
||||
classDefinition = this.parser.RULE("classDefinition", () => {
|
||||
this.consumeTokenType(tokens.Class);
|
||||
this.consumeTokenType(tokens.Identifier);
|
||||
this.parser.OPTION(() => this.subrule(this.typeParameters));
|
||||
// Constructor annotations (for DI patterns like @Inject())
|
||||
this.manyOf(() => this.subrule(this.annotation));
|
||||
// Constructor parameters (multiple parameter lists supported)
|
||||
this.parser.MANY(() => this.subrule(this.classParameters));
|
||||
this.parser.OPTION(() => this.subrule(this.extendsClause));
|
||||
this.parser.OPTION(() => this.subrule(this.classBody));
|
||||
});
|
||||
|
||||
// Object definition
|
||||
objectDefinition = this.parser.RULE("objectDefinition", () => {
|
||||
this.consumeTokenType(tokens.ObjectKeyword);
|
||||
this.consumeTokenType(tokens.Identifier);
|
||||
this.parser.OPTION(() => this.subrule(this.extendsClause));
|
||||
this.parser.OPTION(() => this.subrule(this.classBody));
|
||||
});
|
||||
|
||||
// Trait definition
|
||||
traitDefinition = this.parser.RULE("traitDefinition", () => {
|
||||
this.consumeTokenType(tokens.Trait);
|
||||
this.consumeTokenType(tokens.Identifier);
|
||||
this.parser.OPTION(() => this.subrule(this.typeParameters));
|
||||
this.parser.OPTION(() => this.subrule(this.extendsClause));
|
||||
this.parser.OPTION(() => this.subrule(this.classBody));
|
||||
});
|
||||
|
||||
// Enum definition (Scala 3)
|
||||
enumDefinition = this.parser.RULE("enumDefinition", () => {
|
||||
this.consumeTokenType(tokens.Enum);
|
||||
this.consumeTokenType(tokens.Identifier);
|
||||
this.parser.OPTION(() => this.subrule(this.typeParameters));
|
||||
this.parser.OPTION(() => this.subrule(this.classParameters));
|
||||
this.parser.OPTION(() => this.subrule(this.extendsClause));
|
||||
this.consumeTokenType(tokens.LeftBrace);
|
||||
this.manyOf(() => this.subrule(this.enumCaseDef));
|
||||
this.consumeTokenType(tokens.RightBrace);
|
||||
});
|
||||
|
||||
enumCaseDef = this.parser.RULE("enumCaseDef", () => {
|
||||
this.consumeTokenType(tokens.Case);
|
||||
this.consumeTokenType(tokens.Identifier);
|
||||
this.parser.OPTION(() => this.subrule(this.classParameters));
|
||||
this.parser.OPTION(() => this.subrule(this.extendsClause));
|
||||
this.parser.OPTION(() => this.consumeTokenType(tokens.Semicolon));
|
||||
});
|
||||
|
||||
// Extension definition (Scala 3)
|
||||
extensionDefinition = this.parser.RULE("extensionDefinition", () => {
|
||||
this.consumeTokenType(tokens.Extension);
|
||||
this.parser.OPTION(() => this.subrule(this.typeParameters));
|
||||
this.consumeTokenType(tokens.LeftParen);
|
||||
this.consumeTokenType(tokens.Identifier);
|
||||
this.consumeTokenType(tokens.Colon);
|
||||
this.subrule(this.type);
|
||||
this.consumeTokenType(tokens.RightParen);
|
||||
this.consumeTokenType(tokens.LeftBrace);
|
||||
this.manyOf(() => this.subrule(this.extensionMemberDef));
|
||||
this.consumeTokenType(tokens.RightBrace);
|
||||
});
|
||||
|
||||
extensionMemberDef = this.parser.RULE("extensionMemberDef", () => {
|
||||
this.manyOf(() => this.subrule(this.modifier));
|
||||
this.subrule(this.defDefinition);
|
||||
});
|
||||
|
||||
// Val definition
|
||||
valDefinition = this.parser.RULE("valDefinition", () => {
|
||||
this.consumeTokenType(tokens.Val);
|
||||
this.oneOf([
|
||||
{
|
||||
// Simple variable with optional type: val x: Type = expr or val x: Type (abstract)
|
||||
ALT: () => {
|
||||
this.consumeTokenType(tokens.Identifier);
|
||||
this.parser.OPTION(() => {
|
||||
this.consumeTokenType(tokens.Colon);
|
||||
this.subrule(this.type);
|
||||
});
|
||||
this.parser.OPTION(() => {
|
||||
this.consumeTokenType(tokens.Equals);
|
||||
this.subrule(this.expression);
|
||||
});
|
||||
},
|
||||
GATE: () => {
|
||||
// This alternative is for simple identifier patterns only
|
||||
// Must handle: val x = ..., val x: Type = ..., val x: Type (abstract)
|
||||
// Must NOT handle: val (x, y) = ..., val SomeClass(...) = ...
|
||||
const first = this.lookahead(1);
|
||||
const second = this.lookahead(2);
|
||||
|
||||
// If first token is not identifier, this is not a simple val
|
||||
if (!first || first.tokenType !== tokens.Identifier) return false;
|
||||
|
||||
// If second token is left paren, this is a constructor pattern
|
||||
if (second && second.tokenType === tokens.LeftParen) return false;
|
||||
|
||||
// Otherwise, this is a simple identifier (with or without type, with or without assignment)
|
||||
return true;
|
||||
},
|
||||
},
|
||||
{
|
||||
// Pattern matching: val (x, y) = expr or val SomeClass(...) = expr
|
||||
ALT: () => {
|
||||
this.subrule(this.pattern);
|
||||
this.consumeTokenType(tokens.Equals);
|
||||
this.subrule(this.expression);
|
||||
},
|
||||
},
|
||||
]);
|
||||
this.parser.OPTION(() => this.consumeTokenType(tokens.Semicolon));
|
||||
});
|
||||
|
||||
// Var definition
|
||||
varDefinition = this.parser.RULE("varDefinition", () => {
|
||||
this.consumeTokenType(tokens.Var);
|
||||
this.consumeTokenType(tokens.Identifier);
|
||||
this.parser.OPTION(() => {
|
||||
this.consumeTokenType(tokens.Colon);
|
||||
this.subrule(this.type);
|
||||
});
|
||||
this.consumeTokenType(tokens.Equals);
|
||||
this.subrule(this.expression);
|
||||
this.parser.OPTION(() => this.consumeTokenType(tokens.Semicolon));
|
||||
});
|
||||
|
||||
// Method definition
|
||||
defDefinition = this.parser.RULE("defDefinition", () => {
|
||||
this.consumeTokenType(tokens.Def);
|
||||
this.oneOf([
|
||||
// Regular method name
|
||||
{ ALT: () => this.consumeTokenType(tokens.Identifier) },
|
||||
// Constructor (this keyword)
|
||||
{ ALT: () => this.consumeTokenType(tokens.This) },
|
||||
]);
|
||||
this.parser.OPTION(() => this.subrule(this.typeParameters));
|
||||
this.parser.OPTION(() => this.subrule(this.parameterLists));
|
||||
this.parser.OPTION(() => {
|
||||
this.consumeTokenType(tokens.Colon);
|
||||
this.subrule(this.type);
|
||||
});
|
||||
this.parser.OPTION(() => {
|
||||
this.consumeTokenType(tokens.Equals);
|
||||
this.subrule(this.expression);
|
||||
});
|
||||
this.parser.OPTION(() => this.consumeTokenType(tokens.Semicolon));
|
||||
});
|
||||
|
||||
// Type definition
|
||||
typeDefinition = this.parser.RULE("typeDefinition", () => {
|
||||
this.consumeTokenType(tokens.Type);
|
||||
this.consumeTokenType(tokens.Identifier);
|
||||
this.parser.OPTION(() => this.subrule(this.typeParameters));
|
||||
this.consumeTokenType(tokens.Equals);
|
||||
this.subrule(this.type);
|
||||
this.parser.OPTION(() => this.consumeTokenType(tokens.Semicolon));
|
||||
});
|
||||
}
|
||||
@@ -1,425 +0,0 @@
|
||||
/**
|
||||
* Expression parsing module for all types of expressions in Scala
|
||||
*/
|
||||
import { BaseParserModule, tokens } from "./base";
|
||||
import type { ParserMethod, CstNode } from "chevrotain";
|
||||
|
||||
export class ExpressionParserMixin extends BaseParserModule {
  // Dependencies from other parser modules, assigned by the composing parser
  // before grammar analysis (definite-assignment `!` reflects that contract).
  annotation!: ParserMethod<unknown[], CstNode>;
  modifier!: ParserMethod<unknown[], CstNode>;
  type!: ParserMethod<unknown[], CstNode>;
  literal!: ParserMethod<unknown[], CstNode>;
  qualifiedIdentifier!: ParserMethod<unknown[], CstNode>;
  pattern!: ParserMethod<unknown[], CstNode>;
  parameterLists!: ParserMethod<unknown[], CstNode>;
  typeArgument!: ParserMethod<unknown[], CstNode>;
  caseClause!: ParserMethod<unknown[], CstNode>;
  generator!: ParserMethod<unknown[], CstNode>;

  // Main expression rule: entry point for all expression forms.
  expression = this.parser.RULE("expression", () => {
    this.parser.OR([
      // Polymorphic function literal (Scala 3): `[T] => ...`
      {
        ALT: () => this.subrule(this.polymorphicFunctionLiteral),
        GATE: () => {
          const la1 = this.parser.LA(1);
          return la1?.tokenType === tokens.LeftBracket;
        },
      },
      // Everything else
      { ALT: () => this.subrule(this.assignmentOrInfixExpression) },
    ]);
  });

  // Infix chain: postfix (op postfix)*. Assignment operators are treated as
  // ordinary infix operators via infixOperator (which includes Equals).
  assignmentOrInfixExpression = this.parser.RULE(
    "assignmentOrInfixExpression",
    () => {
      this.subrule(this.postfixExpression);
      this.parser.MANY(() => {
        this.subrule(this.infixOperator);
        this.subrule(this.postfixExpression);
      });
    },
  );

  // Postfix expression: a primary followed by any number of calls, type
  // applications, member selections, or a trailing `?` (Ask-pattern style).
  postfixExpression = this.parser.RULE("postfixExpression", () => {
    this.subrule(this.primaryExpression);
    this.parser.MANY(() => {
      this.parser.OR([
        // Method call with parentheses: expr(args...)
        {
          ALT: () => {
            this.consumeTokenType(tokens.LeftParen);
            this.parser.MANY_SEP({
              SEP: tokens.Comma,
              DEF: () => this.subrule(this.expression),
            });
            this.consumeTokenType(tokens.RightParen);
          },
        },
        // Type arguments: expr[T, U]
        {
          ALT: () => {
            this.consumeTokenType(tokens.LeftBracket);
            this.parser.MANY_SEP({
              SEP: tokens.Comma,
              DEF: () => this.subrule(this.typeArgument),
            });
            this.consumeTokenType(tokens.RightBracket);
          },
        },
        // Member access: expr.name
        {
          ALT: () => {
            this.consumeTokenType(tokens.Dot);
            this.consumeTokenType(tokens.Identifier);
          },
        },
        // Postfix operator (like Ask pattern ?)
        {
          ALT: () => {
            this.consumeTokenType(tokens.Question);
          },
        },
      ]);
    });
  });

  // Primary expression: atoms and compound expression forms.
  // Alternatives are tried in order (Chevrotain OR semantics), so ordering
  // here is load-bearing.
  primaryExpression = this.parser.RULE("primaryExpression", () => {
    this.parser.OR([
      // Literals
      { ALT: () => this.subrule(this.literal) },
      // Identifier
      { ALT: () => this.consumeTokenType(tokens.Identifier) },
      // This and super
      { ALT: () => this.consumeTokenType(tokens.This) },
      { ALT: () => this.consumeTokenType(tokens.Super) },
      // Underscore (placeholder)
      { ALT: () => this.consumeTokenType(tokens.Underscore) },
      // Parenthesized expression (the inner expression is optional, so `()`
      // also matches here — overlaps with the unit literal in the literal
      // module; NOTE(review): confirm which alternative should win).
      {
        ALT: () => {
          this.consumeTokenType(tokens.LeftParen);
          this.parser.OPTION(() => this.subrule(this.expression));
          this.consumeTokenType(tokens.RightParen);
        },
      },
      // Block expression. NOTE(review): blockExpression and
      // partialFunctionLiteral both start with `{` and have no GATEs —
      // confirm Chevrotain's ambiguity detection accepts this ordering.
      { ALT: () => this.subrule(this.blockExpression) },
      // New expression
      { ALT: () => this.subrule(this.newExpression) },
      // Partial function literal
      { ALT: () => this.subrule(this.partialFunctionLiteral) },
      // Quote expression (Scala 3)
      { ALT: () => this.subrule(this.quoteExpression) },
      // Splice expression (Scala 3)
      { ALT: () => this.subrule(this.spliceExpression) },
      // If expression
      { ALT: () => this.subrule(this.ifExpression) },
      // While expression
      { ALT: () => this.subrule(this.whileExpression) },
      // Try expression
      { ALT: () => this.subrule(this.tryExpression) },
      // For expression
      { ALT: () => this.subrule(this.forExpression) },
      // Match expression.
      // NOTE(review): this alternative begins with subrule(this.expression),
      // which makes primaryExpression left-recursive (expression ->
      // assignmentOrInfixExpression -> postfixExpression ->
      // primaryExpression -> expression). Chevrotain's grammar validation
      // rejects left recursion at construction time — this likely needs to
      // be restructured as a postfix form (e.g. handled in
      // postfixExpression after a primary). Verify against a running build.
      {
        ALT: () => {
          this.subrule(this.expression);
          this.consumeTokenType(tokens.Match);
          this.consumeTokenType(tokens.LeftBrace);
          this.parser.MANY(() => this.subrule(this.caseClause));
          this.consumeTokenType(tokens.RightBrace);
        },
      },
      // Lambda expression: `x => expr` or `(x: T, y) => expr`,
      // disambiguated by the GATE below.
      {
        ALT: () => {
          this.parser.OR([
            // Simple identifier lambda: x =>
            {
              ALT: () => {
                this.consumeTokenType(tokens.Identifier);
              },
            },
            // Multiple parameters with optional types: (x, y) =>
            {
              ALT: () => {
                this.consumeTokenType(tokens.LeftParen);
                this.parser.MANY_SEP({
                  SEP: tokens.Comma,
                  DEF: () => {
                    this.consumeTokenType(tokens.Identifier);
                    this.parser.OPTION(() => {
                      this.consumeTokenType(tokens.Colon);
                      this.subrule(this.type);
                    });
                  },
                });
                this.consumeTokenType(tokens.RightParen);
              },
            },
          ]);
          this.consumeTokenType(tokens.Arrow);
          this.subrule(this.expression);
        },
        GATE: () => {
          const la1 = this.parser.LA(1);
          const la2 = this.parser.LA(2);

          // Simple lambda: identifier =>
          if (
            la1?.tokenType === tokens.Identifier &&
            la2?.tokenType === tokens.Arrow
          ) {
            return true;
          }

          // Parenthesized lambda: ( ... ) =>
          // Scan past the balanced paren group; after the loop `i` points
          // one past the closing paren, so LA(i) is the token after `)`.
          if (la1?.tokenType === tokens.LeftParen) {
            let i = 2;
            let parenCount = 1;
            while (parenCount > 0 && this.parser.LA(i)) {
              const token = this.parser.LA(i);
              if (token?.tokenType === tokens.LeftParen) parenCount++;
              if (token?.tokenType === tokens.RightParen) parenCount--;
              i++;
            }
            return this.parser.LA(i)?.tokenType === tokens.Arrow;
          }

          return false;
        },
      },
    ]);
  });

  // Infix operator: one token of any operator class usable in infix
  // position. Ordering matters — compound operators must come before their
  // single-token prefixes so the longer form is tried first.
  infixOperator = this.parser.RULE("infixOperator", () => {
    this.parser.OR([
      // Special compound assignment operators
      { ALT: () => this.consumeTokenType(tokens.PlusEquals) },
      { ALT: () => this.consumeTokenType(tokens.MinusEquals) },
      { ALT: () => this.consumeTokenType(tokens.StarEquals) },
      { ALT: () => this.consumeTokenType(tokens.SlashEquals) },
      { ALT: () => this.consumeTokenType(tokens.PercentEquals) },
      { ALT: () => this.consumeTokenType(tokens.AppendEquals) },
      // sbt-specific operators
      { ALT: () => this.consumeTokenType(tokens.SbtAssign) },
      { ALT: () => this.consumeTokenType(tokens.DoublePercent) },
      // Basic arithmetic operators
      { ALT: () => this.consumeTokenType(tokens.Plus) },
      { ALT: () => this.consumeTokenType(tokens.Minus) },
      { ALT: () => this.consumeTokenType(tokens.Star) },
      { ALT: () => this.consumeTokenType(tokens.Slash) },
      { ALT: () => this.consumeTokenType(tokens.Percent) },
      // Comparison operators (plain assignment is also treated as infix)
      { ALT: () => this.consumeTokenType(tokens.Equals) },
      { ALT: () => this.consumeTokenType(tokens.EqualsEquals) },
      { ALT: () => this.consumeTokenType(tokens.NotEquals) },
      { ALT: () => this.consumeTokenType(tokens.LessThan) },
      { ALT: () => this.consumeTokenType(tokens.GreaterThan) },
      { ALT: () => this.consumeTokenType(tokens.LessThanEquals) },
      { ALT: () => this.consumeTokenType(tokens.GreaterThanEquals) },
      // Logical operators
      { ALT: () => this.consumeTokenType(tokens.LogicalAnd) },
      { ALT: () => this.consumeTokenType(tokens.LogicalOr) },
      // Bitwise operators
      { ALT: () => this.consumeTokenType(tokens.BitwiseAnd) },
      { ALT: () => this.consumeTokenType(tokens.BitwiseOr) },
      { ALT: () => this.consumeTokenType(tokens.BitwiseXor) },
      // Shift operators
      { ALT: () => this.consumeTokenType(tokens.LeftShift) },
      { ALT: () => this.consumeTokenType(tokens.RightShift) },
      { ALT: () => this.consumeTokenType(tokens.UnsignedRightShift) },
      // Type operators
      { ALT: () => this.consumeTokenType(tokens.Colon) },
      { ALT: () => this.consumeTokenType(tokens.ColonEquals) },
      // Collection operators
      { ALT: () => this.consumeTokenType(tokens.ConcatOp) },
      { ALT: () => this.consumeTokenType(tokens.PrependOp) },
      { ALT: () => this.consumeTokenType(tokens.AppendOp) },
      // XML operators
      { ALT: () => this.consumeTokenType(tokens.Backslash) },
      // General symbolic operator
      { ALT: () => this.consumeTokenType(tokens.OperatorIdentifier) },
      // Identifier as operator (for named methods used as infix)
      {
        ALT: () => this.consumeTokenType(tokens.Identifier),
      },
    ]);
  });

  // Polymorphic function literal (Scala 3): [T, U] => expr
  polymorphicFunctionLiteral = this.parser.RULE(
    "polymorphicFunctionLiteral",
    () => {
      this.consumeTokenType(tokens.LeftBracket);
      this.parser.MANY_SEP({
        SEP: tokens.Comma,
        DEF: () => this.subrule(this.polymorphicTypeParameter),
      });
      this.consumeTokenType(tokens.RightBracket);
      this.consumeTokenType(tokens.Arrow);
      this.subrule(this.expression);
    },
  );

  // New expression: `new Type(args)(args)...` or `new { ... }`.
  // NOTE(review): the anonymous-class body is currently parsed as an empty
  // brace pair — member parsing is not implemented here.
  newExpression = this.parser.RULE("newExpression", () => {
    this.consumeTokenType(tokens.New);
    this.parser.OR([
      // New with class instantiation (possibly multiple argument lists)
      {
        ALT: () => {
          this.subrule(this.type);
          this.parser.MANY(() => {
            this.consumeTokenType(tokens.LeftParen);
            this.parser.MANY_SEP({
              SEP: tokens.Comma,
              DEF: () => this.subrule(this.expression),
            });
            this.consumeTokenType(tokens.RightParen);
          });
        },
      },
      // New with anonymous class
      {
        ALT: () => {
          this.consumeTokenType(tokens.LeftBrace);
          // Class body content (placeholder — body must be empty)
          this.consumeTokenType(tokens.RightBrace);
        },
      },
    ]);
  });

  // Block expression: `{ stmt[;] stmt[;] ... }`
  blockExpression = this.parser.RULE("blockExpression", () => {
    this.consumeTokenType(tokens.LeftBrace);
    this.parser.MANY(() => {
      this.subrule(this.blockStatement);
      this.parser.OPTION(() => this.consumeTokenType(tokens.Semicolon));
    });
    this.consumeTokenType(tokens.RightBrace);
  });

  // Partial function literal: `{ case ... => ... }` (at least one case)
  partialFunctionLiteral = this.parser.RULE("partialFunctionLiteral", () => {
    this.consumeTokenType(tokens.LeftBrace);
    this.parser.AT_LEAST_ONE(() => this.subrule(this.caseClause));
    this.consumeTokenType(tokens.RightBrace);
  });

  // Quote expression (Scala 3 metaprogramming): '{ expr }
  quoteExpression = this.parser.RULE("quoteExpression", () => {
    this.consumeTokenType(tokens.Quote);
    this.consumeTokenType(tokens.LeftBrace);
    this.subrule(this.expression);
    this.consumeTokenType(tokens.RightBrace);
  });

  // Splice expression (Scala 3 metaprogramming): ${ expr }
  spliceExpression = this.parser.RULE("spliceExpression", () => {
    this.consumeTokenType(tokens.Dollar);
    this.consumeTokenType(tokens.LeftBrace);
    this.subrule(this.expression);
    this.consumeTokenType(tokens.RightBrace);
  });

  // If expression: `if (cond) expr [else expr]`.
  // NOTE(review): parentheses around the condition are required, so the
  // Scala 3 paren-less `if cond then expr` form does not parse here.
  ifExpression = this.parser.RULE("ifExpression", () => {
    this.consumeTokenType(tokens.If);
    this.consumeTokenType(tokens.LeftParen);
    this.subrule(this.expression);
    this.consumeTokenType(tokens.RightParen);
    this.subrule(this.expression);
    this.parser.OPTION(() => {
      this.consumeTokenType(tokens.Else);
      this.subrule(this.expression);
    });
  });

  // While expression: `while (cond) body`
  whileExpression = this.parser.RULE("whileExpression", () => {
    this.consumeTokenType(tokens.While);
    this.consumeTokenType(tokens.LeftParen);
    this.subrule(this.expression);
    this.consumeTokenType(tokens.RightParen);
    this.subrule(this.expression);
  });

  // Try expression: `try expr [catch {...}|expr] [finally expr]`
  tryExpression = this.parser.RULE("tryExpression", () => {
    this.consumeTokenType(tokens.Try);
    this.subrule(this.expression);
    this.parser.OPTION(() => {
      this.consumeTokenType(tokens.Catch);
      this.parser.OR([
        // Pattern-based catch: catch { case ... }
        {
          ALT: () => {
            this.consumeTokenType(tokens.LeftBrace);
            this.parser.MANY(() => this.subrule(this.caseClause));
            this.consumeTokenType(tokens.RightBrace);
          },
        },
        // Expression-based catch (e.g. a handler value)
        {
          ALT: () => this.subrule(this.expression),
        },
      ]);
    });
    this.parser.OPTION(() => {
      this.consumeTokenType(tokens.Finally);
      this.subrule(this.expression);
    });
  });

  // For expression/comprehension: `for (gens) [yield] expr` or
  // `for { gens } [yield] expr`
  forExpression = this.parser.RULE("forExpression", () => {
    this.consumeTokenType(tokens.For);
    this.parser.OR([
      // Parenthesized generators, semicolon-separated
      {
        ALT: () => {
          this.consumeTokenType(tokens.LeftParen);
          this.parser.AT_LEAST_ONE_SEP({
            SEP: tokens.Semicolon,
            DEF: () => this.subrule(this.generator),
          });
          this.consumeTokenType(tokens.RightParen);
        },
      },
      // Braced generators, newline/no separator
      {
        ALT: () => {
          this.consumeTokenType(tokens.LeftBrace);
          this.parser.MANY(() => this.subrule(this.generator));
          this.consumeTokenType(tokens.RightBrace);
        },
      },
    ]);
    this.parser.OPTION(() => this.consumeTokenType(tokens.Yield));
    this.subrule(this.expression);
  });

  // Helper rule dependencies (placeholders to be moved to other modules)
  polymorphicTypeParameter = this.parser.RULE(
    "polymorphicTypeParameter",
    () => {
      // Placeholder - should be in types.ts
      this.consumeTokenType(tokens.Identifier);
    },
  );

  blockStatement = this.parser.RULE("blockStatement", () => {
    // Placeholder - should be in statements.ts
    this.subrule(this.expression);
  });
}
|
||||
@@ -1,126 +0,0 @@
|
||||
/**
|
||||
* Literal parsing module for all Scala literal types
|
||||
*/
|
||||
import { BaseParserModule, tokens } from "./base";
|
||||
|
||||
// Module for literal parsing - no additional imports needed
|
||||
|
||||
export class LiteralParserMixin extends BaseParserModule {
  // Main literal rule: one alternative per literal class. Alternatives are
  // tried in order (Chevrotain OR semantics).
  literal = this.parser.RULE("literal", () => {
    this.parser.OR([
      // Numeric literals
      { ALT: () => this.consumeTokenType(tokens.IntegerLiteral) },
      { ALT: () => this.consumeTokenType(tokens.FloatingPointLiteral) },
      { ALT: () => this.consumeTokenType(tokens.ScientificNotationLiteral) },

      // Boolean literals
      { ALT: () => this.consumeTokenType(tokens.True) },
      { ALT: () => this.consumeTokenType(tokens.False) },

      // Character literal
      { ALT: () => this.consumeTokenType(tokens.CharLiteral) },

      // String literals
      { ALT: () => this.consumeTokenType(tokens.StringLiteral) },
      { ALT: () => this.consumeTokenType(tokens.InterpolatedStringLiteral) },

      // Interpolated strings (s"", f"", raw"", custom — see rule below).
      // NOTE(review): InterpolatedStringLiteral above and the
      // interpolatedString subrule both claim interpolated strings —
      // confirm the token sets are disjoint.
      { ALT: () => this.subrule(this.interpolatedString) },

      // Null literal
      { ALT: () => this.consumeTokenType(tokens.Null) },

      // Unit literal ().
      // NOTE(review): overlaps with the (optional-body) parenthesized
      // expression alternative in primaryExpression — confirm intended
      // precedence between the two rules.
      {
        ALT: () => {
          this.consumeTokenType(tokens.LeftParen);
          this.consumeTokenType(tokens.RightParen);
        },
      },
    ]);
  });

  // Interpolated string: exactly one interpolator token.
  interpolatedString = this.parser.RULE("interpolatedString", () => {
    this.parser.OR([
      // s-interpolator
      { ALT: () => this.consumeTokenType(tokens.InterpolatedString) },
      // f-interpolator
      { ALT: () => this.consumeTokenType(tokens.FormattedString) },
      // raw-interpolator
      { ALT: () => this.consumeTokenType(tokens.RawString) },
      // Custom interpolator
      { ALT: () => this.consumeTokenType(tokens.CustomInterpolatedString) },
    ]);
  });

  // Numeric literal with optional size/width suffix (e.g. 1L, 2.0f).
  // NOTE(review): this rule is not referenced by `literal` above — confirm
  // whether it is wired in elsewhere or dead.
  numericLiteral = this.parser.RULE("numericLiteral", () => {
    this.parser.OR([
      // Integer types
      {
        ALT: () => {
          this.consumeTokenType(tokens.IntegerLiteral);
          this.parser.OPTION(() => {
            this.parser.OR([
              { ALT: () => this.consumeTokenType(tokens.LongSuffix) },
              { ALT: () => this.consumeTokenType(tokens.IntSuffix) },
              { ALT: () => this.consumeTokenType(tokens.ShortSuffix) },
              { ALT: () => this.consumeTokenType(tokens.ByteSuffix) },
            ]);
          });
        },
      },
      // Floating point types
      {
        ALT: () => {
          this.consumeTokenType(tokens.FloatingPointLiteral);
          this.parser.OPTION(() => {
            this.parser.OR([
              { ALT: () => this.consumeTokenType(tokens.FloatSuffix) },
              { ALT: () => this.consumeTokenType(tokens.DoubleSuffix) },
            ]);
          });
        },
      },
    ]);
  });

  // XML literal (if XML support is needed)
  xmlLiteral = this.parser.RULE("xmlLiteral", () => {
    // Placeholder for XML literals — would require XML-specific lexing;
    // currently just consumes a plain string literal.
    this.consumeTokenType(tokens.StringLiteral);
  });

  // Collection literal patterns (syntactic sugar such as List(1, 2, 3)).
  // NOTE(review): elements are restricted to `literal`, so nested
  // expressions (List(x + 1)) would not parse here — confirm intended.
  collectionLiteral = this.parser.RULE("collectionLiteral", () => {
    this.parser.OR([
      // List literal: List(1, 2, 3) — any identifier-named factory
      {
        ALT: () => {
          this.consumeTokenType(tokens.Identifier); // List, Set, etc.
          this.consumeTokenType(tokens.LeftParen);
          this.parser.MANY_SEP({
            SEP: tokens.Comma,
            DEF: () => this.subrule(this.literal),
          });
          this.consumeTokenType(tokens.RightParen);
        },
      },
      // Array literal: Array(1, 2, 3) — Array is its own token type
      {
        ALT: () => {
          this.consumeTokenType(tokens.Array);
          this.consumeTokenType(tokens.LeftParen);
          this.parser.MANY_SEP({
            SEP: tokens.Comma,
            DEF: () => this.subrule(this.literal),
          });
          this.consumeTokenType(tokens.RightParen);
        },
      },
    ]);
  });
}
|
||||
@@ -1,243 +0,0 @@
|
||||
/**
|
||||
* Pattern matching parsing module
|
||||
*/
|
||||
import { BaseParserModule, tokens } from "./base";
|
||||
import type { ParserMethod, CstNode } from "chevrotain";
|
||||
|
||||
export class PatternParserMixin extends BaseParserModule {
  // Dependencies from other parser modules, assigned by the composing
  // parser before grammar analysis.
  literal!: ParserMethod<unknown[], CstNode>;
  qualifiedIdentifier!: ParserMethod<unknown[], CstNode>;
  type!: ParserMethod<unknown[], CstNode>;
  expression!: ParserMethod<unknown[], CstNode>;

  // Pattern rule: all pattern forms used in match/val/for positions.
  // Alternative order matters (Chevrotain OR semantics); several
  // alternatives use bounded hand-rolled lookahead GATEs.
  pattern = this.parser.RULE("pattern", () => {
    this.parser.OR([
      // Wildcard pattern: _
      { ALT: () => this.consumeTokenType(tokens.Underscore) },
      // Literal pattern
      { ALT: () => this.subrule(this.literal) },
      // Variable pattern: an identifier whose first character is lowercase
      // (the two-sided check excludes caseless characters like digits/_).
      {
        ALT: () => this.consumeTokenType(tokens.Identifier),
        GATE: () => {
          const la1 = this.parser.LA(1);
          if (la1?.tokenType !== tokens.Identifier) return false;
          const firstChar = la1.image[0];
          return (
            firstChar === firstChar.toLowerCase() &&
            firstChar !== firstChar.toUpperCase()
          );
        },
      },
      // Stable identifier pattern (uppercase or qualified)
      {
        ALT: () => this.subrule(this.qualifiedIdentifier),
      },
      // Constructor pattern: Qualified.Name(patterns...)
      {
        ALT: () => {
          this.subrule(this.qualifiedIdentifier);
          this.consumeTokenType(tokens.LeftParen);
          this.parser.MANY_SEP({
            SEP: tokens.Comma,
            DEF: () => this.subrule(this.pattern),
          });
          this.consumeTokenType(tokens.RightParen);
        },
        GATE: () => {
          // Walk a dotted identifier chain and require `(` right after it.
          let i = 1;
          while (this.parser.LA(i)?.tokenType === tokens.Identifier) {
            if (this.parser.LA(i + 1)?.tokenType === tokens.Dot) {
              i += 2;
            } else {
              return this.parser.LA(i + 1)?.tokenType === tokens.LeftParen;
            }
          }
          return false;
        },
      },
      // Tuple pattern: (p1, p2, ...)
      {
        ALT: () => {
          this.consumeTokenType(tokens.LeftParen);
          this.parser.MANY_SEP({
            SEP: tokens.Comma,
            DEF: () => this.subrule(this.pattern),
          });
          this.consumeTokenType(tokens.RightParen);
        },
      },
      // Typed pattern: pattern : Type.
      // NOTE(review): this alternative begins with subrule(this.pattern),
      // i.e. direct left recursion on `pattern` — Chevrotain's grammar
      // validation rejects left-recursive rules at construction time.
      // Likely needs restructuring into a suffix after a non-recursive
      // base pattern. Verify against a running build.
      {
        ALT: () => {
          this.subrule(this.pattern);
          this.consumeTokenType(tokens.Colon);
          this.subrule(this.type);
        },
        GATE: () => {
          // Bounded (20-token) scan for a top-level `:` before any
          // terminator that would end the pattern.
          let i = 1;
          let parenDepth = 0;
          while (i < 20) {
            const token = this.parser.LA(i);
            if (!token) return false;
            if (token.tokenType === tokens.LeftParen) parenDepth++;
            if (token.tokenType === tokens.RightParen) parenDepth--;
            if (parenDepth === 0 && token.tokenType === tokens.Colon) {
              return true;
            }
            if (
              parenDepth === 0 &&
              (token.tokenType === tokens.Arrow ||
                token.tokenType === tokens.Equals ||
                token.tokenType === tokens.If)
            ) {
              return false;
            }
            i++;
          }
          return false;
        },
      },
      // Alternative pattern: p1 | p2 | ...
      // NOTE(review): also left-recursive (starts with subrule(this.pattern))
      // — same concern as the typed-pattern alternative above.
      {
        ALT: () => {
          this.subrule(this.pattern);
          this.parser.MANY(() => {
            this.consumeTokenType(tokens.BitwiseOr);
            this.subrule(this.pattern);
          });
        },
        GATE: () => {
          // Bounded (20-token) scan for a top-level `|` before a terminator.
          let i = 1;
          let parenDepth = 0;
          while (i < 20) {
            const token = this.parser.LA(i);
            if (!token) return false;
            if (token.tokenType === tokens.LeftParen) parenDepth++;
            if (token.tokenType === tokens.RightParen) parenDepth--;
            if (parenDepth === 0 && token.tokenType === tokens.BitwiseOr) {
              return true;
            }
            if (
              parenDepth === 0 &&
              (token.tokenType === tokens.Arrow ||
                token.tokenType === tokens.Equals)
            ) {
              return false;
            }
            i++;
          }
          return false;
        },
      },
    ]);
  });

  // Case clause (used in match expressions and partial functions):
  // `case pattern [if guard] => body`
  caseClause = this.parser.RULE("caseClause", () => {
    this.consumeTokenType(tokens.Case);
    this.subrule(this.pattern);

    // Optional guard
    this.parser.OPTION(() => {
      this.consumeTokenType(tokens.If);
      this.subrule(this.expression);
    });

    this.consumeTokenType(tokens.Arrow);

    // Case body — a run of expressions, or empty. The GATE stops the body
    // at the next `case` or the closing `}` of the enclosing block.
    this.parser.OR([
      // Block of statements
      {
        ALT: () => {
          this.parser.MANY(() => {
            this.subrule(this.expression);
            this.parser.OPTION(() => this.consumeTokenType(tokens.Semicolon));
          });
        },
        GATE: () => {
          // If the next token is 'case' or '}', the body is empty/ended.
          const la1 = this.parser.LA(1);
          return (
            la1?.tokenType !== tokens.Case &&
            la1?.tokenType !== tokens.RightBrace
          );
        },
      },
      // Empty case (rare but valid)
      { ALT: () => {} },
    ]);
  });

  // Generator (used in for comprehensions): `pattern <- expr`,
  // `pattern = expr` (value definition), or `if expr` (guard).
  generator = this.parser.RULE("generator", () => {
    this.parser.OR([
      // Pattern generator: pattern <- expression
      {
        ALT: () => {
          this.subrule(this.pattern);
          this.consumeTokenType(tokens.LeftArrow);
          this.subrule(this.expression);
        },
      },
      // Value definition: pattern = expression
      {
        ALT: () => {
          this.subrule(this.pattern);
          this.consumeTokenType(tokens.Equals);
          this.subrule(this.expression);
        },
      },
      // Guard: if expression
      {
        ALT: () => {
          this.consumeTokenType(tokens.If);
          this.subrule(this.expression);
        },
      },
    ]);
  });

  // Extractor pattern: Qualified.Name(p1, ..., _*) — allows a vararg
  // sequence wildcard in any argument position.
  extractorPattern = this.parser.RULE("extractorPattern", () => {
    this.subrule(this.qualifiedIdentifier);
    this.consumeTokenType(tokens.LeftParen);
    this.parser.MANY_SEP({
      SEP: tokens.Comma,
      DEF: () => {
        this.parser.OR([
          // Regular pattern
          { ALT: () => this.subrule(this.pattern) },
          // Sequence pattern: _*
          {
            ALT: () => {
              this.consumeTokenType(tokens.Underscore);
              this.consumeTokenType(tokens.Star);
            },
          },
        ]);
      },
    });
    this.consumeTokenType(tokens.RightParen);
  });

  // Infix pattern (e.g. `head :: tail` written with a named operator).
  // NOTE(review): not referenced from `pattern` — confirm it is wired in
  // elsewhere or dead.
  infixPattern = this.parser.RULE("infixPattern", () => {
    this.subrule(this.pattern);
    this.consumeTokenType(tokens.Identifier);
    this.subrule(this.pattern);
  });

  // XML pattern (if XML support is needed)
  xmlPattern = this.parser.RULE("xmlPattern", () => {
    // Placeholder for XML patterns — would require XML-specific tokens;
    // currently just consumes a plain string literal.
    this.consumeTokenType(tokens.StringLiteral);
  });
}
|
||||
@@ -1,298 +0,0 @@
|
||||
/**
|
||||
* Scala 3 specific features parsing module
|
||||
*/
|
||||
import { BaseParserModule, tokens } from "./base";
|
||||
import type { ParserMethod, CstNode } from "chevrotain";
|
||||
|
||||
export class Scala3ParserMixin extends BaseParserModule {
|
||||
// Dependencies from other modules
|
||||
annotation!: ParserMethod<unknown[], CstNode>;
|
||||
modifier!: ParserMethod<unknown[], CstNode>;
|
||||
typeParameters!: ParserMethod<unknown[], CstNode>;
|
||||
type!: ParserMethod<unknown[], CstNode>;
|
||||
expression!: ParserMethod<unknown[], CstNode>;
|
||||
pattern!: ParserMethod<unknown[], CstNode>;
|
||||
parameterLists!: ParserMethod<unknown[], CstNode>;
|
||||
classBody!: ParserMethod<unknown[], CstNode>;
|
||||
extendsClause!: ParserMethod<unknown[], CstNode>;
|
||||
qualifiedIdentifier!: ParserMethod<unknown[], CstNode>;
|
||||
valDefinition!: ParserMethod<unknown[], CstNode>;
|
||||
defDefinition!: ParserMethod<unknown[], CstNode>;
|
||||
typeDefinition!: ParserMethod<unknown[], CstNode>;
|
||||
|
||||
// Enum definition (Scala 3)
|
||||
enumDefinition = this.parser.RULE("enumDefinition", () => {
|
||||
this.consumeTokenType(tokens.Enum);
|
||||
this.consumeTokenType(tokens.Identifier);
|
||||
this.parser.OPTION(() => this.subrule(this.typeParameters));
|
||||
this.parser.OPTION(() => this.subrule(this.extendsClause));
|
||||
this.consumeTokenType(tokens.LeftBrace);
|
||||
this.parser.MANY(() => {
|
||||
this.parser.OR([
|
||||
{ ALT: () => this.subrule(this.enumCase) },
|
||||
{ ALT: () => this.subrule(this.classMember) },
|
||||
]);
|
||||
this.parser.OPTION(() => this.consumeTokenType(tokens.Semicolon));
|
||||
});
|
||||
this.consumeTokenType(tokens.RightBrace);
|
||||
});
|
||||
|
||||
// Enum case
|
||||
enumCase = this.parser.RULE("enumCase", () => {
|
||||
this.consumeTokenType(tokens.Case);
|
||||
this.consumeTokenType(tokens.Identifier);
|
||||
this.parser.OPTION(() => {
|
||||
this.consumeTokenType(tokens.LeftParen);
|
||||
this.parser.MANY_SEP({
|
||||
SEP: tokens.Comma,
|
||||
DEF: () => {
|
||||
this.consumeTokenType(tokens.Identifier);
|
||||
this.consumeTokenType(tokens.Colon);
|
||||
this.subrule(this.type);
|
||||
},
|
||||
});
|
||||
this.consumeTokenType(tokens.RightParen);
|
||||
});
|
||||
this.parser.OPTION(() => {
|
||||
this.consumeTokenType(tokens.Extends);
|
||||
this.subrule(this.type);
|
||||
});
|
||||
});
|
||||
|
||||
// Extension definition (Scala 3)
// `extension [Tps] (x: T) [paramLists] member | { member; ... }`
extensionDefinition = this.parser.RULE("extensionDefinition", () => {
  this.consumeTokenType(tokens.Extension);

  // Optional type parameters before the extended type
  this.parser.OPTION(() => this.subrule(this.typeParameters));

  // Extended type with parameters
  this.consumeTokenType(tokens.LeftParen);
  this.consumeTokenType(tokens.Identifier);
  this.consumeTokenType(tokens.Colon);
  this.subrule(this.type);
  this.consumeTokenType(tokens.RightParen);

  // Optional using/given clauses
  this.parser.MANY(() => this.subrule(this.parameterLists));

  // Extension body
  this.parser.OR([
    // Single method
    { ALT: () => this.subrule(this.extensionMember) },
    // Multiple methods in braces
    {
      ALT: () => {
        this.consumeTokenType(tokens.LeftBrace);
        this.parser.MANY(() => {
          this.subrule(this.extensionMember);
          this.parser.OPTION(() => this.consumeTokenType(tokens.Semicolon));
        });
        this.consumeTokenType(tokens.RightBrace);
      },
    },
  ]);
});
|
||||
|
||||
// Extension member
// Annotations and modifiers followed by a def/val/type definition.
extensionMember = this.parser.RULE("extensionMember", () => {
  this.parser.MANY(() => this.subrule(this.annotation));
  this.parser.MANY(() => this.subrule(this.modifier));
  this.parser.OR([
    { ALT: () => this.subrule(this.defDefinition) },
    { ALT: () => this.subrule(this.valDefinition) },
    { ALT: () => this.subrule(this.typeDefinition) },
  ]);
});
|
||||
|
||||
// Given definition (Scala 3)
// `given [name] [Tps] [paramLists] : Type` followed by an implementation:
// either `with <classBody | = expr>` or a direct `= expr`.
givenDefinition = this.parser.RULE("givenDefinition", () => {
  this.consumeTokenType(tokens.Given);

  // Optional given name
  this.parser.OPTION(() => {
    this.consumeTokenType(tokens.Identifier);
  });

  // Optional type parameters
  this.parser.OPTION(() => this.subrule(this.typeParameters));

  // Optional parameter lists (for given with parameters)
  this.parser.MANY(() => this.subrule(this.parameterLists));

  this.consumeTokenType(tokens.Colon);
  this.subrule(this.type);

  // Implementation (mandatory: one of the alternatives must match)
  this.parser.OR([
    // With implementation
    {
      ALT: () => {
        this.consumeTokenType(tokens.With);
        this.parser.OR([
          // Block implementation
          { ALT: () => this.subrule(this.classBody) },
          // Expression implementation
          // NOTE(review): this accepts `with = expr`, which is unusual
          // Scala 3 surface syntax — confirm intent.
          {
            ALT: () => {
              this.consumeTokenType(tokens.Equals);
              this.subrule(this.expression);
            },
          },
        ]);
      },
    },
    // Direct implementation with =
    {
      ALT: () => {
        this.consumeTokenType(tokens.Equals);
        this.subrule(this.expression);
      },
    },
  ]);
});
|
||||
|
||||
// Opaque type definition (Scala 3)
// `opaque type Name[Tps] [<: T | >: T] = T`
opaqueTypeDefinition = this.parser.RULE("opaqueTypeDefinition", () => {
  this.consumeTokenType(tokens.Opaque);
  this.consumeTokenType(tokens.Type);
  this.consumeTokenType(tokens.Identifier);
  this.parser.OPTION(() => this.subrule(this.typeParameters));

  // Optional type bounds (upper `<:` or lower `>:` — at most one here)
  this.parser.OPTION(() => {
    this.parser.OR([
      {
        ALT: () => {
          this.consumeTokenType(tokens.ColonLess);
          this.subrule(this.type);
        },
      },
      {
        ALT: () => {
          this.consumeTokenType(tokens.GreaterColon);
          this.subrule(this.type);
        },
      },
    ]);
  });

  this.consumeTokenType(tokens.Equals);
  this.subrule(this.type);
});
|
||||
|
||||
// Inline modifier handling (Scala 3)
// `inline` followed by a def or val definition.
inlineDefinition = this.parser.RULE("inlineDefinition", () => {
  this.consumeTokenType(tokens.Inline);
  this.parser.OR([
    {
      ALT: () => this.subrule(this.defDefinition),
    },
    {
      ALT: () => this.subrule(this.valDefinition),
    },
  ]);
});
|
||||
|
||||
// Transparent modifier handling (Scala 3)
// Only the `transparent inline def` combination is accepted here.
transparentDefinition = this.parser.RULE("transparentDefinition", () => {
  this.consumeTokenType(tokens.Transparent);
  this.consumeTokenType(tokens.Inline);
  this.subrule(this.defDefinition);
});
|
||||
|
||||
// Export clause (already implemented in statements, but Scala 3 specific)
// Moved from statements module for better organization
// `export <exportExpression>` with an optional trailing semicolon.
exportClause = this.parser.RULE("exportClause", () => {
  this.consumeTokenType(tokens.Export);
  this.subrule(this.exportExpression);
  this.parser.OPTION(() => this.consumeTokenType(tokens.Semicolon));
});
|
||||
|
||||
// Export path: a qualified identifier followed by dot-separated segments
// (identifier, `_`, `given`, or `{ selectors }`).
exportExpression = this.parser.RULE("exportExpression", () => {
  this.subrule(this.qualifiedIdentifier);
  // NOTE(review): a mandatory Dot is consumed here AND each MANY iteration
  // below consumes its own Dot, which appears to require a double dot between
  // the qualified path and the first segment — confirm against the sibling
  // statements-module implementation, which has no extra Dot.
  this.consumeTokenType(tokens.Dot);
  this.parser.MANY(() => {
    this.consumeTokenType(tokens.Dot);
    this.parser.OR([
      {
        ALT: () => this.consumeTokenType(tokens.Identifier),
      },
      { ALT: () => this.consumeTokenType(tokens.Underscore) },
      { ALT: () => this.consumeTokenType(tokens.Given) },
      {
        ALT: () => {
          this.consumeTokenType(tokens.LeftBrace);
          this.parser.MANY_SEP({
            SEP: tokens.Comma,
            DEF: () => this.subrule(this.exportSelector),
          });
          this.consumeTokenType(tokens.RightBrace);
        },
      },
    ]);
  });
});
|
||||
|
||||
// One selector inside an export `{ ... }`: `given`, a plain name, or a name
// with a rename (`x => y`) or hide (`x => _`) suffix.
exportSelector = this.parser.RULE("exportSelector", () => {
  this.parser.OR([
    // given selector
    { ALT: () => this.consumeTokenType(tokens.Given) },
    // Regular selector with optional rename
    {
      ALT: () => {
        this.consumeTokenType(tokens.Identifier);
        this.parser.OPTION(() => {
          this.parser.OR([
            // Rename: x => y
            {
              ALT: () => {
                this.consumeTokenType(tokens.Arrow);
                this.consumeTokenType(tokens.Identifier);
              },
            },
            // Hide: x => _
            {
              ALT: () => {
                this.consumeTokenType(tokens.Arrow);
                this.consumeTokenType(tokens.Underscore);
              },
            },
          ]);
        });
      },
    },
  ]);
});
|
||||
|
||||
// Using clause (Scala 3 - for context parameters)
// `using (x: T, y: U, ...)`
usingClause = this.parser.RULE("usingClause", () => {
  this.consumeTokenType(tokens.Using);
  this.consumeTokenType(tokens.LeftParen);
  this.parser.MANY_SEP({
    SEP: tokens.Comma,
    DEF: () => {
      this.consumeTokenType(tokens.Identifier);
      this.consumeTokenType(tokens.Colon);
      this.subrule(this.type);
    },
  });
  this.consumeTokenType(tokens.RightParen);
});
|
||||
|
||||
// Helper rule placeholder
// Minimal class-member rule so `enumDefinition` can reference it from this
// module; the full implementation belongs in the definitions module.
classMember = this.parser.RULE("classMember", () => {
  // Placeholder - should be in definitions.ts
  this.parser.OR([
    {
      ALT: () => this.subrule(this.valDefinition),
    },
    {
      ALT: () => this.subrule(this.defDefinition),
    },
    {
      ALT: () => this.subrule(this.typeDefinition),
    },
  ]);
});
|
||||
}
|
||||
@@ -1,165 +0,0 @@
|
||||
/**
|
||||
* Statement parsing module for package, import, and export declarations
|
||||
*/
|
||||
import { BaseParserModule, tokens } from "./base";
|
||||
import type { ParserMethod, CstNode } from "chevrotain";
|
||||
|
||||
export class StatementParserMixin extends BaseParserModule {
  // Dependencies from other modules
  // (definite-assignment fields: these rules are supplied by sibling mixins
  // when the full parser class is composed)
  qualifiedIdentifier!: ParserMethod<unknown[], CstNode>;
  expression!: ParserMethod<unknown[], CstNode>;

  // Package declaration
  // `package a.b.c` with an optional trailing semicolon.
  packageClause = this.parser.RULE("packageClause", () => {
    this.consumeTokenType(tokens.Package);
    this.subrule(this.qualifiedIdentifier);
    this.optionalConsume(tokens.Semicolon);
  });

  // Import declaration
  importClause = this.parser.RULE("importClause", () => {
    this.consumeTokenType(tokens.Import);
    this.subrule(this.importExpression);
    this.optionalConsume(tokens.Semicolon);
  });

  // Import path: a leading identifier followed by `.segment` steps, where a
  // segment is another identifier, a wildcard `_`, or `{ selectors }`.
  importExpression = this.parser.RULE("importExpression", () => {
    // Parse the base path (e.g., "scala.collection")
    this.consumeTokenType(tokens.Identifier);
    this.manyOf(() => {
      this.consumeTokenType(tokens.Dot);
      this.oneOf([
        // Next identifier in path
        {
          ALT: () =>
            this.parser.CONSUME(tokens.Identifier, { LABEL: "Identifier2" }),
        },
        // Wildcard import
        { ALT: () => this.consumeTokenType(tokens.Underscore) },
        // Multiple import selectors
        {
          ALT: () => {
            this.consumeTokenType(tokens.LeftBrace);
            this.parser.AT_LEAST_ONE_SEP({
              SEP: tokens.Comma,
              DEF: () => this.subrule(this.importSelector),
            });
            this.consumeTokenType(tokens.RightBrace);
          },
        },
      ]);
    });
  });

  // One selector inside `{ ... }`: `name`, `name => alias`, `name => _`, or `_`.
  importSelector = this.parser.RULE("importSelector", () => {
    this.oneOf([
      {
        ALT: () => {
          this.consumeTokenType(tokens.Identifier);
          this.parser.OPTION(() => {
            this.consumeTokenType(tokens.Arrow);
            this.oneOf([
              {
                ALT: () =>
                  this.parser.CONSUME(tokens.Identifier, {
                    LABEL: "Identifier2",
                  }),
              },
              { ALT: () => this.consumeTokenType(tokens.Underscore) },
            ]);
          });
        },
      },
      {
        ALT: () =>
          this.parser.CONSUME(tokens.Underscore, { LABEL: "Underscore2" }),
      }, // Allow wildcard import in selectors
    ]);
  });

  // Export declaration (Scala 3)
  exportClause = this.parser.RULE("exportClause", () => {
    this.consumeTokenType(tokens.Export);
    this.subrule(this.exportExpression);
    this.optionalConsume(tokens.Semicolon);
  });

  // Export path: like importExpression, plus `given` as a valid segment.
  exportExpression = this.parser.RULE("exportExpression", () => {
    // Parse the base path (e.g., "mypackage")
    this.consumeTokenType(tokens.Identifier);
    this.manyOf(() => {
      this.consumeTokenType(tokens.Dot);
      this.oneOf([
        // Next identifier in path
        {
          ALT: () =>
            this.parser.CONSUME(tokens.Identifier, { LABEL: "Identifier2" }),
        },
        // Given keyword for given exports
        { ALT: () => this.consumeTokenType(tokens.Given) },
        // Wildcard export
        { ALT: () => this.consumeTokenType(tokens.Underscore) },
        // Multiple export selectors
        {
          ALT: () => {
            this.consumeTokenType(tokens.LeftBrace);
            this.parser.AT_LEAST_ONE_SEP({
              SEP: tokens.Comma,
              DEF: () => this.subrule(this.exportSelector),
            });
            this.consumeTokenType(tokens.RightBrace);
          },
        },
      ]);
    });
  });

  // One selector inside an export `{ ... }`.
  // NOTE(review): the bare `Given` alternative precedes the `Given Identifier`
  // alternative; both start with the same token, so the longer alternative may
  // be unreachable or ambiguous depending on how `oneOf` resolves lookahead —
  // confirm.
  exportSelector = this.parser.RULE("exportSelector", () => {
    this.oneOf([
      {
        ALT: () => {
          this.consumeTokenType(tokens.Identifier);
          this.parser.OPTION(() => {
            this.consumeTokenType(tokens.Arrow);
            this.oneOf([
              {
                ALT: () =>
                  this.parser.CONSUME(tokens.Identifier, {
                    LABEL: "Identifier2",
                  }),
              },
              { ALT: () => this.consumeTokenType(tokens.Underscore) },
            ]);
          });
        },
      },
      {
        ALT: () =>
          this.parser.CONSUME(tokens.Underscore, { LABEL: "Underscore2" }),
      },
      { ALT: () => this.consumeTokenType(tokens.Given) },
      {
        ALT: () => {
          this.consumeTokenType(tokens.Given);
          this.consumeTokenType(tokens.Identifier);
        },
      },
    ]);
  });

  // Assignment statement (for sbt files and general assignments)
  // `ident <op> expression` where <op> is an sbt or compound assignment token.
  assignmentStatement = this.parser.RULE("assignmentStatement", () => {
    this.consumeTokenType(tokens.Identifier);
    this.oneOf([
      { ALT: () => this.consumeTokenType(tokens.SbtAssign) },
      { ALT: () => this.consumeTokenType(tokens.PlusEquals) },
      { ALT: () => this.consumeTokenType(tokens.MinusEquals) },
      { ALT: () => this.consumeTokenType(tokens.StarEquals) },
      { ALT: () => this.consumeTokenType(tokens.SlashEquals) },
      { ALT: () => this.consumeTokenType(tokens.PercentEquals) },
      { ALT: () => this.consumeTokenType(tokens.AppendEquals) },
      { ALT: () => this.consumeTokenType(tokens.Equals) },
    ]);
    this.subrule(this.expression);
  });
}
|
||||
@@ -1,439 +0,0 @@
|
||||
/**
|
||||
* Type system parsing module for Scala types
|
||||
*/
|
||||
import { BaseParserModule, tokens } from "./base";
|
||||
import type { ParserMethod, CstNode } from "chevrotain";
|
||||
|
||||
export class TypeParserMixin extends BaseParserModule {
|
||||
// Dependencies from other modules
|
||||
qualifiedIdentifier!: ParserMethod<unknown[], CstNode>;
|
||||
expression!: ParserMethod<unknown[], CstNode>;
|
||||
literal!: ParserMethod<unknown[], CstNode>;
|
||||
|
||||
// Main type rule
|
||||
type = this.parser.RULE("type", () => {
|
||||
this.subrule(this.unionType);
|
||||
});
|
||||
|
||||
// Union types (Scala 3)
|
||||
unionType = this.parser.RULE("unionType", () => {
|
||||
this.subrule(this.intersectionType);
|
||||
this.parser.MANY(() => {
|
||||
this.consumeTokenType(tokens.BitwiseOr);
|
||||
this.subrule(this.intersectionType);
|
||||
});
|
||||
});
|
||||
|
||||
// Intersection types (Scala 3)
|
||||
intersectionType = this.parser.RULE("intersectionType", () => {
|
||||
this.subrule(this.baseType);
|
||||
this.parser.MANY(() => {
|
||||
this.consumeTokenType(tokens.BitwiseAnd);
|
||||
this.subrule(this.baseType);
|
||||
});
|
||||
});
|
||||
|
||||
// Base type
|
||||
baseType = this.parser.RULE("baseType", () => {
|
||||
this.parser.OR([
|
||||
// Simple type
|
||||
{ ALT: () => this.subrule(this.simpleType) },
|
||||
// Function type: A => B or (A, B) => C
|
||||
{
|
||||
ALT: () => {
|
||||
this.parser.OR([
|
||||
// Single parameter without parentheses
|
||||
{
|
||||
ALT: () => this.subrule(this.simpleType),
|
||||
},
|
||||
// Multiple parameters or single with parentheses
|
||||
{
|
||||
ALT: () => {
|
||||
this.consumeTokenType(tokens.LeftParen);
|
||||
this.parser.MANY_SEP({
|
||||
SEP: tokens.Comma,
|
||||
DEF: () => this.subrule(this.type),
|
||||
});
|
||||
this.consumeTokenType(tokens.RightParen);
|
||||
},
|
||||
},
|
||||
]);
|
||||
this.consumeTokenType(tokens.Arrow);
|
||||
this.subrule(this.type);
|
||||
},
|
||||
GATE: () => {
|
||||
// Look ahead to detect function types
|
||||
let i = 1;
|
||||
const la1 = this.parser.LA(i);
|
||||
|
||||
// Simple function type: Type =>
|
||||
if (la1?.tokenType === tokens.Identifier) {
|
||||
const la2 = this.parser.LA(2);
|
||||
if (la2?.tokenType === tokens.Arrow) return true;
|
||||
if (la2?.tokenType === tokens.Dot) {
|
||||
// Handle qualified types like A.B =>
|
||||
i = 3;
|
||||
while (
|
||||
this.parser.LA(i)?.tokenType === tokens.Identifier &&
|
||||
this.parser.LA(i + 1)?.tokenType === tokens.Dot
|
||||
) {
|
||||
i += 2;
|
||||
}
|
||||
return this.parser.LA(i + 1)?.tokenType === tokens.Arrow;
|
||||
}
|
||||
}
|
||||
|
||||
// Parenthesized function type: (...) =>
|
||||
if (la1?.tokenType === tokens.LeftParen) {
|
||||
let parenCount = 1;
|
||||
i = 2;
|
||||
while (parenCount > 0 && i < 50) {
|
||||
const token = this.parser.LA(i);
|
||||
if (token?.tokenType === tokens.LeftParen) parenCount++;
|
||||
if (token?.tokenType === tokens.RightParen) parenCount--;
|
||||
i++;
|
||||
}
|
||||
return this.parser.LA(i)?.tokenType === tokens.Arrow;
|
||||
}
|
||||
|
||||
return false;
|
||||
},
|
||||
},
|
||||
// Context function type (Scala 3): A ?=> B
|
||||
{
|
||||
ALT: () => this.subrule(this.contextFunctionType),
|
||||
GATE: () => {
|
||||
// Look for ?=> pattern
|
||||
let i = 1;
|
||||
while (i < 20) {
|
||||
const token = this.parser.LA(i);
|
||||
if (token?.tokenType === tokens.QuestionArrow) return true;
|
||||
if (!token) return false;
|
||||
i++;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
},
|
||||
// Dependent function type (Scala 3)
|
||||
{
|
||||
ALT: () => this.subrule(this.dependentFunctionType),
|
||||
GATE: () => {
|
||||
const la1 = this.parser.LA(1);
|
||||
const la2 = this.parser.LA(2);
|
||||
const la3 = this.parser.LA(3);
|
||||
return (
|
||||
la1?.tokenType === tokens.LeftParen &&
|
||||
la2?.tokenType === tokens.Identifier &&
|
||||
la3?.tokenType === tokens.Colon
|
||||
);
|
||||
},
|
||||
},
|
||||
// Polymorphic function type (Scala 3): [T] => T => T
|
||||
{
|
||||
ALT: () => this.subrule(this.polymorphicFunctionType),
|
||||
GATE: () => {
|
||||
const la1 = this.parser.LA(1);
|
||||
if (la1?.tokenType !== tokens.LeftBracket) return false;
|
||||
|
||||
// Look for ] =>> pattern
|
||||
let i = 2;
|
||||
let bracketCount = 1;
|
||||
while (bracketCount > 0 && i < 30) {
|
||||
const token = this.parser.LA(i);
|
||||
if (token?.tokenType === tokens.LeftBracket) bracketCount++;
|
||||
if (token?.tokenType === tokens.RightBracket) bracketCount--;
|
||||
i++;
|
||||
}
|
||||
return this.parser.LA(i)?.tokenType === tokens.DoubleArrow;
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
// Simple type
|
||||
simpleType = this.parser.RULE("simpleType", () => {
|
||||
this.parser.OR([
|
||||
// Literal type
|
||||
{
|
||||
ALT: () => this.subrule(this.literal),
|
||||
GATE: () => {
|
||||
const la1 = this.parser.LA(1);
|
||||
return (
|
||||
la1?.tokenType === tokens.IntegerLiteral ||
|
||||
la1?.tokenType === tokens.FloatingPointLiteral ||
|
||||
la1?.tokenType === tokens.True ||
|
||||
la1?.tokenType === tokens.CharLiteral ||
|
||||
la1?.tokenType === tokens.StringLiteral ||
|
||||
la1?.tokenType === tokens.Null
|
||||
);
|
||||
},
|
||||
},
|
||||
// Tuple type or parenthesized type
|
||||
{ ALT: () => this.subrule(this.tupleTypeOrParenthesized) },
|
||||
// Type projection: T#U
|
||||
{
|
||||
ALT: () => {
|
||||
this.subrule(this.simpleType);
|
||||
this.consumeTokenType(tokens.Hash);
|
||||
this.consumeTokenType(tokens.Identifier);
|
||||
},
|
||||
GATE: () => {
|
||||
// Complex lookahead for type projection
|
||||
let i = 1;
|
||||
while (i < 20) {
|
||||
const token = this.parser.LA(i);
|
||||
if (token?.tokenType === tokens.Hash) return true;
|
||||
if (
|
||||
!token ||
|
||||
token.tokenType === tokens.Arrow ||
|
||||
token.tokenType === tokens.Comma
|
||||
)
|
||||
return false;
|
||||
i++;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
},
|
||||
// Singleton type: x.type
|
||||
{
|
||||
ALT: () => {
|
||||
this.subrule(this.qualifiedIdentifier);
|
||||
this.consumeTokenType(tokens.Dot);
|
||||
this.consumeTokenType(tokens.Type);
|
||||
},
|
||||
GATE: () => {
|
||||
let i = 1;
|
||||
while (
|
||||
this.parser.LA(i)?.tokenType === tokens.Identifier &&
|
||||
this.parser.LA(i + 1)?.tokenType === tokens.Dot
|
||||
) {
|
||||
i += 2;
|
||||
}
|
||||
return (
|
||||
this.parser.LA(i)?.tokenType === tokens.Identifier &&
|
||||
this.parser.LA(i + 1)?.tokenType === tokens.Dot &&
|
||||
this.parser.LA(i + 2)?.tokenType === tokens.Type
|
||||
);
|
||||
},
|
||||
},
|
||||
// Wildcard type: _
|
||||
{
|
||||
ALT: () => this.consumeTokenType(tokens.Underscore),
|
||||
},
|
||||
// Kind projector: * or ?
|
||||
{
|
||||
ALT: () => {
|
||||
this.parser.OR([
|
||||
{ ALT: () => this.consumeTokenType(tokens.Star) },
|
||||
{ ALT: () => this.consumeTokenType(tokens.Question) },
|
||||
]);
|
||||
},
|
||||
},
|
||||
// Array type constructor
|
||||
{
|
||||
ALT: () => {
|
||||
this.consumeTokenType(tokens.Array);
|
||||
this.parser.OPTION(() => {
|
||||
this.consumeTokenType(tokens.LeftBracket);
|
||||
this.parser.MANY_SEP({
|
||||
SEP: tokens.Comma,
|
||||
DEF: () => this.subrule(this.typeArgument),
|
||||
});
|
||||
this.consumeTokenType(tokens.RightBracket);
|
||||
});
|
||||
},
|
||||
},
|
||||
// Regular type with optional type arguments
|
||||
{
|
||||
ALT: () => {
|
||||
this.subrule(this.qualifiedIdentifier);
|
||||
this.parser.OPTION(() => {
|
||||
this.consumeTokenType(tokens.LeftBracket);
|
||||
this.parser.MANY_SEP({
|
||||
SEP: tokens.Comma,
|
||||
DEF: () => this.subrule(this.typeArgument),
|
||||
});
|
||||
this.consumeTokenType(tokens.RightBracket);
|
||||
});
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
// Type argument
|
||||
typeArgument = this.parser.RULE("typeArgument", () => {
|
||||
// Optional variance annotation
|
||||
this.parser.OPTION(() => {
|
||||
this.parser.OR([
|
||||
{ ALT: () => this.consumeTokenType(tokens.Plus) },
|
||||
{ ALT: () => this.consumeTokenType(tokens.Minus) },
|
||||
]);
|
||||
});
|
||||
this.subrule(this.type);
|
||||
});
|
||||
|
||||
// Tuple type or parenthesized type
|
||||
tupleTypeOrParenthesized = this.parser.RULE(
|
||||
"tupleTypeOrParenthesized",
|
||||
() => {
|
||||
this.consumeTokenType(tokens.LeftParen);
|
||||
this.parser.OPTION(() => {
|
||||
this.subrule(this.type);
|
||||
this.parser.MANY(() => {
|
||||
this.consumeTokenType(tokens.Comma);
|
||||
this.subrule(this.type);
|
||||
});
|
||||
});
|
||||
this.consumeTokenType(tokens.RightParen);
|
||||
},
|
||||
);
|
||||
|
||||
// Context function type (Scala 3)
|
||||
contextFunctionType = this.parser.RULE("contextFunctionType", () => {
|
||||
this.parser.OR([
|
||||
// Single parameter
|
||||
{ ALT: () => this.subrule(this.simpleType) },
|
||||
// Multiple parameters
|
||||
{
|
||||
ALT: () => {
|
||||
this.consumeTokenType(tokens.LeftParen);
|
||||
this.parser.MANY_SEP({
|
||||
SEP: tokens.Comma,
|
||||
DEF: () => this.subrule(this.type),
|
||||
});
|
||||
this.consumeTokenType(tokens.RightParen);
|
||||
},
|
||||
},
|
||||
]);
|
||||
this.consumeTokenType(tokens.QuestionArrow);
|
||||
this.subrule(this.type);
|
||||
});
|
||||
|
||||
// Dependent function type (Scala 3)
|
||||
dependentFunctionType = this.parser.RULE("dependentFunctionType", () => {
|
||||
this.consumeTokenType(tokens.LeftParen);
|
||||
this.parser.AT_LEAST_ONE_SEP({
|
||||
SEP: tokens.Comma,
|
||||
DEF: () => this.subrule(this.dependentParameter),
|
||||
});
|
||||
this.consumeTokenType(tokens.RightParen);
|
||||
this.consumeTokenType(tokens.Arrow);
|
||||
this.subrule(this.type);
|
||||
});
|
||||
|
||||
// Dependent parameter
|
||||
dependentParameter = this.parser.RULE("dependentParameter", () => {
|
||||
this.consumeTokenType(tokens.Identifier);
|
||||
this.consumeTokenType(tokens.Colon);
|
||||
this.subrule(this.type);
|
||||
});
|
||||
|
||||
// Polymorphic function type (Scala 3)
|
||||
polymorphicFunctionType = this.parser.RULE("polymorphicFunctionType", () => {
|
||||
this.consumeTokenType(tokens.LeftBracket);
|
||||
this.parser.MANY_SEP({
|
||||
SEP: tokens.Comma,
|
||||
DEF: () => this.subrule(this.typeLambdaParameter),
|
||||
});
|
||||
this.consumeTokenType(tokens.RightBracket);
|
||||
this.consumeTokenType(tokens.DoubleArrow);
|
||||
this.subrule(this.type);
|
||||
});
|
||||
|
||||
// Type lambda (Scala 3)
|
||||
typeLambda = this.parser.RULE("typeLambda", () => {
|
||||
this.consumeTokenType(tokens.LeftBracket);
|
||||
this.parser.MANY_SEP({
|
||||
SEP: tokens.Comma,
|
||||
DEF: () => this.subrule(this.typeLambdaParameter),
|
||||
});
|
||||
this.consumeTokenType(tokens.RightBracket);
|
||||
this.consumeTokenType(tokens.DoubleArrow);
|
||||
this.subrule(this.type);
|
||||
});
|
||||
|
||||
// Type lambda parameter
|
||||
typeLambdaParameter = this.parser.RULE("typeLambdaParameter", () => {
|
||||
// Optional variance
|
||||
this.parser.OPTION(() => {
|
||||
this.parser.OR([
|
||||
{ ALT: () => this.consumeTokenType(tokens.Plus) },
|
||||
{ ALT: () => this.consumeTokenType(tokens.Minus) },
|
||||
]);
|
||||
});
|
||||
this.consumeTokenType(tokens.Identifier);
|
||||
// Optional type bounds
|
||||
this.parser.OPTION(() => {
|
||||
this.parser.OR([
|
||||
{
|
||||
ALT: () => {
|
||||
this.consumeTokenType(tokens.ColonLess);
|
||||
this.subrule(this.type);
|
||||
},
|
||||
},
|
||||
{
|
||||
ALT: () => {
|
||||
this.consumeTokenType(tokens.GreaterColon);
|
||||
this.subrule(this.type);
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
// Type parameters
|
||||
typeParameters = this.parser.RULE("typeParameters", () => {
|
||||
this.consumeTokenType(tokens.LeftBracket);
|
||||
this.parser.MANY_SEP({
|
||||
SEP: tokens.Comma,
|
||||
DEF: () => this.subrule(this.typeParameter),
|
||||
});
|
||||
this.consumeTokenType(tokens.RightBracket);
|
||||
});
|
||||
|
||||
// Type parameter
|
||||
typeParameter = this.parser.RULE("typeParameter", () => {
|
||||
// Optional variance annotation
|
||||
this.parser.OPTION(() => {
|
||||
this.parser.OR([
|
||||
{ ALT: () => this.consumeTokenType(tokens.Plus) },
|
||||
{ ALT: () => this.consumeTokenType(tokens.Minus) },
|
||||
]);
|
||||
});
|
||||
this.consumeTokenType(tokens.Identifier);
|
||||
// Optional type bounds
|
||||
this.parser.OPTION(() => {
|
||||
this.parser.OR([
|
||||
{
|
||||
ALT: () => {
|
||||
this.consumeTokenType(tokens.ColonLess);
|
||||
this.subrule(this.type);
|
||||
},
|
||||
},
|
||||
{
|
||||
ALT: () => {
|
||||
this.consumeTokenType(tokens.GreaterColon);
|
||||
this.subrule(this.type);
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
// Match type (Scala 3)
|
||||
matchType = this.parser.RULE("matchType", () => {
|
||||
this.subrule(this.type);
|
||||
this.consumeTokenType(tokens.Match);
|
||||
this.consumeTokenType(tokens.LeftBrace);
|
||||
this.parser.MANY(() => this.subrule(this.matchTypeCase));
|
||||
this.consumeTokenType(tokens.RightBrace);
|
||||
});
|
||||
|
||||
// Match type case
|
||||
matchTypeCase = this.parser.RULE("matchTypeCase", () => {
|
||||
this.consumeTokenType(tokens.Case);
|
||||
this.subrule(this.type);
|
||||
this.consumeTokenType(tokens.Arrow);
|
||||
this.subrule(this.type);
|
||||
});
|
||||
}
|
||||
@@ -1,63 +0,0 @@
|
||||
import type {
|
||||
CstNode,
|
||||
IToken,
|
||||
ILexingError,
|
||||
IRecognitionException,
|
||||
CstElement,
|
||||
} from "chevrotain";
|
||||
|
||||
// Source span of a CST node: absolute character offsets plus line/column
// coordinates. Whether lines/columns are 0- or 1-based is not visible from
// this file — TODO confirm at the producer.
export interface SourceLocation {
  startOffset: number;
  endOffset: number;
  startLine: number;
  endLine: number;
  startColumn: number;
  endColumn: number;
}
|
||||
|
||||
// Chevrotain CstNode extended with an optional source span plus several
// optional token-like fields kept for compatibility with existing consumers.
export interface ScalaCstNode extends CstNode {
  name: string;
  children: Record<string, CstElement[]>;
  location?: SourceLocation;
  // Additional properties for compatibility
  image?: string;
  type?: string;
  originalComments?: string[];
  startLine?: number;
  value?: string;
  startOffset?: number;
  endOffset?: number;
}
|
||||
|
||||
// Result of a full parse: CST root, recognition errors, and comment tokens.
export interface ParseResult {
  cst: ScalaCstNode;
  errors: IRecognitionException[];
  comments: IToken[];
}
|
||||
|
||||
// Result of lexing: token stream, lexing errors, and token groups
// (comments are routed into their own group).
export interface LexResult {
  tokens: IToken[];
  errors: ILexingError[];
  groups: {
    comments?: IToken[];
  };
}
|
||||
|
||||
// Offset range of a token. Whether `end` is inclusive or exclusive is not
// visible from this file — TODO confirm at the usage site.
export interface TokenBounds {
  start: number;
  end: number;
}
|
||||
|
||||
// A line/column position pair.
export interface LineColumn {
  line: number;
  column: number;
}
|
||||
|
||||
// Return type of a Chevrotain parser method.
export interface ParserMethodResult extends CstNode {
  name: string;
  children: Record<string, (CstNode | IToken)[]>;
}
|
||||
|
||||
// Type definition for a parser rule.
export type ParserRule<T = ParserMethodResult> = () => T;
|
||||
@@ -1,182 +0,0 @@
|
||||
/**
|
||||
* Unicode utilities for Scala parser
|
||||
* Handles Unicode normalization and character validation
|
||||
*/
|
||||
|
||||
/**
|
||||
* Normalizes Unicode strings using NFC (Canonical Decomposition, followed by Canonical Composition)
|
||||
* This ensures consistent representation of Unicode characters.
|
||||
*
|
||||
* @param text - The input text to normalize
|
||||
* @returns The normalized text
|
||||
*/
|
||||
export function normalizeUnicode(text: string): string {
|
||||
return text.normalize("NFC");
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a character is a valid Scala identifier start character
|
||||
* Follows Unicode identifier specification for Scala
|
||||
*
|
||||
* @param char - The character to check
|
||||
* @returns True if the character can start an identifier
|
||||
*/
|
||||
export function isIdentifierStart(char: string): boolean {
|
||||
if (char.length !== 1) return false;
|
||||
|
||||
const codePoint = char.codePointAt(0);
|
||||
if (codePoint === undefined) return false;
|
||||
|
||||
// Basic ASCII identifier characters
|
||||
if (
|
||||
(codePoint >= 0x41 && codePoint <= 0x5a) || // A-Z
|
||||
(codePoint >= 0x61 && codePoint <= 0x7a) || // a-z
|
||||
codePoint === 0x5f || // _
|
||||
codePoint === 0x24
|
||||
) {
|
||||
// $
|
||||
return true;
|
||||
}
|
||||
|
||||
// Mathematical symbols range (extended)
|
||||
if (
|
||||
(codePoint >= 0x2200 && codePoint <= 0x22ff) || // Mathematical Operators
|
||||
(codePoint >= 0x27c0 && codePoint <= 0x27ef) || // Miscellaneous Mathematical Symbols-A
|
||||
(codePoint >= 0x2980 && codePoint <= 0x29ff) || // Miscellaneous Mathematical Symbols-B
|
||||
(codePoint >= 0x2a00 && codePoint <= 0x2aff)
|
||||
) {
|
||||
// Supplemental Mathematical Operators
|
||||
return true;
|
||||
}
|
||||
|
||||
// Use Unicode property test for other characters (excluding digits for start characters)
|
||||
const testRegex = /\p{L}|\p{Mn}|\p{Mc}|\p{Pc}/u;
|
||||
return testRegex.test(char);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a character is a valid Scala identifier continuation character
|
||||
*
|
||||
* @param char - The character to check
|
||||
* @returns True if the character can continue an identifier
|
||||
*/
|
||||
export function isIdentifierContinue(char: string): boolean {
|
||||
if (char.length !== 1) return false;
|
||||
|
||||
const codePoint = char.codePointAt(0);
|
||||
if (codePoint === undefined) return false;
|
||||
|
||||
// Basic ASCII identifier characters
|
||||
if (
|
||||
(codePoint >= 0x41 && codePoint <= 0x5a) || // A-Z
|
||||
(codePoint >= 0x61 && codePoint <= 0x7a) || // a-z
|
||||
(codePoint >= 0x30 && codePoint <= 0x39) || // 0-9
|
||||
codePoint === 0x5f || // _
|
||||
codePoint === 0x24
|
||||
) {
|
||||
// $
|
||||
return true;
|
||||
}
|
||||
|
||||
// Mathematical symbols range (extended)
|
||||
if (
|
||||
(codePoint >= 0x2200 && codePoint <= 0x22ff) || // Mathematical Operators
|
||||
(codePoint >= 0x27c0 && codePoint <= 0x27ef) || // Miscellaneous Mathematical Symbols-A
|
||||
(codePoint >= 0x2980 && codePoint <= 0x29ff) || // Miscellaneous Mathematical Symbols-B
|
||||
(codePoint >= 0x2a00 && codePoint <= 0x2aff)
|
||||
) {
|
||||
// Supplemental Mathematical Operators
|
||||
return true;
|
||||
}
|
||||
|
||||
// Use Unicode property test for other characters (including format characters)
|
||||
const testRegex = /\p{L}|\p{Mn}|\p{Mc}|\p{Nd}|\p{Pc}|\p{Cf}/u;
|
||||
return testRegex.test(char);
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates that a string is a valid Scala identifier
|
||||
*
|
||||
* @param identifier - The identifier string to validate
|
||||
* @returns True if the string is a valid identifier
|
||||
*/
|
||||
export function isValidIdentifier(identifier: string): boolean {
|
||||
if (!identifier || identifier.length === 0) return false;
|
||||
|
||||
// Normalize the identifier
|
||||
const normalized = normalizeUnicode(identifier);
|
||||
|
||||
// Check first character
|
||||
if (!isIdentifierStart(normalized[0])) return false;
|
||||
|
||||
// Check remaining characters
|
||||
for (let i = 1; i < normalized.length; i++) {
|
||||
if (!isIdentifierContinue(normalized[i])) return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts Unicode escape sequences in strings to actual Unicode characters
|
||||
* Handles \uXXXX patterns in string literals
|
||||
*
|
||||
* @param text - The text containing Unicode escapes
|
||||
* @returns The text with Unicode escapes converted to actual characters
|
||||
*/
|
||||
export function processUnicodeEscapes(text: string): string {
|
||||
return text.replace(/\\u([0-9A-Fa-f]{4})/g, (_, hex) => {
|
||||
const codePoint = parseInt(hex, 16);
|
||||
return String.fromCharCode(codePoint);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Escapes Unicode characters in strings for safe output
|
||||
* Converts non-ASCII characters back to \uXXXX format if needed
|
||||
*
|
||||
* @param text - The text to escape
|
||||
* @param escapeNonAscii - Whether to escape all non-ASCII characters
|
||||
* @returns The escaped text
|
||||
*/
|
||||
export function escapeUnicode(text: string, escapeNonAscii = false): string {
|
||||
if (!escapeNonAscii) return text;
|
||||
|
||||
return text.replace(/[\u0080-\uFFFF]/g, (char) => {
|
||||
const codePoint = char.charCodeAt(0);
|
||||
return `\\u${codePoint.toString(16).padStart(4, "0").toUpperCase()}`;
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Extended mathematical symbols commonly used in Scala functional programming.
 * Each entry maps a descriptive name to the literal character; the trailing
 * comment gives its Unicode code point.
 */
export const MATHEMATICAL_SYMBOLS = {
  // Greek letters commonly used in functional programming
  ALPHA: "α", // U+03B1
  BETA: "β", // U+03B2
  GAMMA: "γ", // U+03B3
  DELTA: "δ", // U+03B4
  LAMBDA: "λ", // U+03BB
  MU: "μ", // U+03BC
  PI: "π", // U+03C0
  SIGMA: "σ", // U+03C3
  TAU: "τ", // U+03C4
  PHI: "φ", // U+03C6

  // Mathematical operators
  FORALL: "∀", // U+2200
  EXISTS: "∃", // U+2203
  ELEMENT_OF: "∈", // U+2208
  NOT_ELEMENT_OF: "∉", // U+2209
  SUBSET: "⊂", // U+2282
  SUPERSET: "⊃", // U+2283
  UNION: "∪", // U+222A
  INTERSECTION: "∩", // U+2229

  // Arrows and other symbols
  RIGHTWARDS_ARROW: "→", // U+2192
  LEFTWARDS_ARROW: "←", // U+2190
  UP_ARROW: "↑", // U+2191
  DOWN_ARROW: "↓", // U+2193
} as const;
|
||||
@@ -1,538 +0,0 @@
|
||||
/**
|
||||
* CSTノードビジターのメインモジュール
|
||||
* 各種ビジターモジュールを統合して使用
|
||||
*/
|
||||
import {
|
||||
DeclarationVisitorMethods,
|
||||
type DeclarationVisitor,
|
||||
} from "./visitor/declarations";
|
||||
import {
|
||||
ExpressionVisitorMethods,
|
||||
type ExpressionVisitor,
|
||||
} from "./visitor/expressions";
|
||||
import { Scala3VisitorMethods, type Scala3Visitor } from "./visitor/scala3";
|
||||
import {
|
||||
StatementVisitorMethods,
|
||||
type StatementVisitor,
|
||||
} from "./visitor/statements";
|
||||
import { TypeVisitorMethods, type TypeVisitor } from "./visitor/types";
|
||||
import {
|
||||
getPrintWidth,
|
||||
getTabWidth,
|
||||
formatStatement,
|
||||
formatStringLiteral,
|
||||
createIndent,
|
||||
attachOriginalComments,
|
||||
} from "./visitor/utils";
|
||||
import type { PrintContext, CSTNode } from "./visitor/utils";
|
||||
import type { ScalaCstNode } from "@/common/prettier/plugins/scala/scala-parser";
|
||||
|
||||
// 外部使用のためのユーティリティ型の再エクスポート
|
||||
export type { PrintContext, CSTNode, PrettierOptions } from "./visitor/utils";
|
||||
|
||||
// Type alias kept for backward compatibility with older visitor code
type VisitorContext = PrintContext;
|
||||
|
||||
/**
 * Visitor that converts CST nodes into formatted text.
 * Composes the per-category visitor modules (declarations, expressions,
 * statements, types, and Scala 3 features) behind a single visit() entry
 * point, with error recovery that falls back to raw token images.
 */
export class CstNodeVisitor
  implements
    DeclarationVisitor,
    ExpressionVisitor,
    StatementVisitor,
    TypeVisitor,
    Scala3Visitor
{
  // Visitor module initialization. Each module keeps a back-reference to
  // this instance so it can recurse through the shared visit() method.
  private declarations = new DeclarationVisitorMethods(this);
  private expressions = new ExpressionVisitorMethods(this);
  private statements = new StatementVisitorMethods(this);
  private types = new TypeVisitorMethods(this);
  private scala3 = new Scala3VisitorMethods(this);

  /**
   * Visits a CST node and converts it into formatted text.
   * @param node - The CST node to visit
   * @param ctx - Print context (options, path, etc.)
   * @returns The formatted string
   */
  visit(node: ScalaCstNode, ctx: PrintContext): string {
    if (!node) return "";

    try {
      // Handle the root node when it carries the original source comments
      if (
        "type" in node &&
        node.type === "compilationUnit" &&
        "originalComments" in node &&
        node.originalComments
      ) {
        const nodeResult = this.visitCore(node, ctx);
        // Safe type conversion of originalComments
        const comments = Array.isArray(node.originalComments)
          ? (node.originalComments as unknown as CSTNode[])
          : [];
        return attachOriginalComments(nodeResult, comments);
      }

      return this.visitCore(node, ctx);
    } catch (error) {
      const nodeName = "name" in node ? node.name : "unknown";
      console.error(`Error visiting node ${nodeName}:`, error);

      // Safe fallback on formatting errors: emit the raw token image
      if ("image" in node && node.image) {
        return String(node.image);
      }

      return `/* FORMAT ERROR: ${nodeName} */`;
    }
  }

  /**
   * Core CST-node dispatch logic.
   * Tokens are printed verbatim; rule nodes are dispatched dynamically to a
   * visit<RuleName> method when one exists; otherwise children are visited.
   * @param node - The CST node to visit
   * @param ctx - Print context
   * @returns The formatted string
   */
  private visitCore(node: CSTNode, ctx: PrintContext): string {
    try {
      // Token nodes: print the raw image
      if ("image" in node && node.image !== undefined) {
        return node.image;
      }

      // CST rule nodes: dispatch by rule name
      if ("name" in node && node.name) {
        // Capitalize the first letter of the rule name
        const ruleName = node.name.charAt(0).toUpperCase() + node.name.slice(1);
        const methodName = `visit${ruleName}`;
        if (
          typeof (this as Record<string, unknown>)[methodName] === "function"
        ) {
          return (
            (this as Record<string, unknown>)[methodName] as (
              node: ScalaCstNode,
              ctx: VisitorContext,
            ) => string
          )(node, ctx);
        }
      }

      // If no specific visitor method exists, try default handling by type
      if ("children" in node && node.children) {
        return this.visitChildren(node, ctx);
      }

      return "";
    } catch (error) {
      const nodeName = "name" in node ? node.name : "unknown";
      console.error(`Error in visitCore for ${nodeName}:`, error);

      // Try to recover by visiting children directly
      if ("children" in node && node.children) {
        try {
          return this.visitChildren(node, ctx);
        } catch (childError) {
          console.error(`Error visiting children of ${nodeName}:`, childError);
          return `/* ERROR: ${nodeName} */`;
        }
      }

      return "image" in node && node.image ? node.image : "";
    }
  }

  /**
   * Visits all children of a node and joins their output with spaces.
   * Errors on individual children are reported inline as comments and do
   * not abort the remaining siblings.
   */
  visitChildren(node: CSTNode, ctx: PrintContext): string {
    const parts: string[] = [];

    if (!("children" in node) || !node.children) return "";

    try {
      for (const [key, children] of Object.entries(node.children)) {
        if (Array.isArray(children)) {
          for (const child of children) {
            try {
              // Type guard for ScalaCstNode
              if ("children" in child && "name" in child) {
                const part = this.visit(child as ScalaCstNode, ctx);
                if (part) {
                  parts.push(part);
                }
              } else {
                // Handle IToken
                const tokenImage = "image" in child ? child.image : "";
                if (tokenImage) {
                  parts.push(tokenImage);
                }
              }
            } catch (childError) {
              const childName = "name" in child ? child.name : "token";
              console.error(
                `Error visiting child ${childName || "unknown"} in ${key}:`,
                childError,
              );
              // Continue with next child instead of failing completely
              parts.push(`/* ERROR: ${childName || "unknown"} */`);
            }
          }
        }
      }
    } catch (error) {
      console.error(
        `Error visiting children of ${node.name || "unknown"}:`,
        error,
      );
      return `/* ERROR: ${node.name || "unknown"} children */`;
    }

    return parts.join(" ");
  }

  // Utility methods for shared functionality

  /** Returns one level of indentation derived from the print context. */
  getIndentation(ctx: PrintContext): string {
    return createIndent(1, ctx);
  }

  /** Returns the configured print width (delegates to the shared util). */
  getPrintWidth(ctx: PrintContext): number {
    return getPrintWidth(ctx);
  }

  /** Returns the configured tab width (delegates to the shared util). */
  getTabWidth(ctx: PrintContext): number {
    return getTabWidth(ctx);
  }

  /** Applies statement-level formatting (delegates to the shared util). */
  formatStatement(statement: string, ctx: PrintContext): string {
    return formatStatement(statement, ctx);
  }

  /** Applies string-literal formatting (delegates to the shared util). */
  formatStringLiteral(content: string, ctx: PrintContext): string {
    return formatStringLiteral(content, ctx);
  }

  // ==========================================
  // Delegation methods to modular visitors
  // ==========================================

  // Compilation unit and top-level structure
  visitCompilationUnit(node: CSTNode, ctx: PrintContext): string {
    return this.statements.visitCompilationUnit(node, ctx);
  }

  // Package and imports/exports
  visitPackageClause(node: ScalaCstNode, ctx: PrintContext): string {
    return this.statements.visitPackageClause(node, ctx);
  }

  visitImportClause(node: ScalaCstNode, ctx: PrintContext): string {
    return this.statements.visitImportClause(node, ctx);
  }

  visitImportExpression(node: ScalaCstNode, ctx: PrintContext): string {
    return this.statements.visitImportExpression(node, ctx);
  }

  visitImportSelector(node: ScalaCstNode, ctx: PrintContext): string {
    return this.statements.visitImportSelector(node, ctx);
  }

  visitExportClause(node: ScalaCstNode, ctx: PrintContext): string {
    return this.scala3.visitExportClause(node, ctx);
  }

  visitExportExpression(node: ScalaCstNode, ctx: PrintContext): string {
    return this.scala3.visitExportExpression(node, ctx);
  }

  visitExportSelector(node: ScalaCstNode, ctx: PrintContext): string {
    return this.scala3.visitExportSelector(node, ctx);
  }

  // Definitions and declarations
  visitTopLevelDefinition(node: CSTNode, ctx: PrintContext): string {
    return this.statements.visitTopLevelDefinition(node, ctx);
  }

  visitDefinition(node: CSTNode, ctx: PrintContext): string {
    return this.statements.visitDefinition(node, ctx);
  }

  visitAnnotations(annotations: CSTNode[], ctx: PrintContext): string {
    return this.statements.visitAnnotations(annotations, ctx);
  }

  visitAnnotation(node: CSTNode, ctx: PrintContext): string {
    return this.statements.visitAnnotation(node, ctx);
  }

  visitAnnotationArgument(node: CSTNode, ctx: PrintContext): string {
    return this.statements.visitAnnotationArgument(node, ctx);
  }

  visitModifiers(modifiers: CSTNode[], ctx: PrintContext): string {
    return this.statements.visitModifiers(modifiers, ctx);
  }

  // Class-related declarations
  visitClassDefinition(node: CSTNode, ctx: PrintContext): string {
    return this.declarations.visitClassDefinition(node, ctx);
  }

  visitObjectDefinition(node: CSTNode, ctx: PrintContext): string {
    return this.declarations.visitObjectDefinition(node, ctx);
  }

  visitTraitDefinition(node: CSTNode, ctx: PrintContext): string {
    return this.declarations.visitTraitDefinition(node, ctx);
  }

  visitValDefinition(node: CSTNode, ctx: PrintContext): string {
    return this.declarations.visitValDefinition(node, ctx);
  }

  visitVarDefinition(node: CSTNode, ctx: PrintContext): string {
    return this.declarations.visitVarDefinition(node, ctx);
  }

  visitDefDefinition(node: CSTNode, ctx: PrintContext): string {
    return this.declarations.visitDefDefinition(node, ctx);
  }

  visitTypeDefinition(node: CSTNode, ctx: PrintContext): string {
    return this.scala3.visitTypeDefinition(node, ctx);
  }

  visitAuxiliaryConstructor(node: CSTNode, ctx: PrintContext): string {
    return this.declarations.visitAuxiliaryConstructor(node, ctx);
  }

  visitClassParameters(node: CSTNode, ctx: PrintContext): string {
    return this.declarations.visitClassParameters(node, ctx);
  }

  visitClassParameter(node: CSTNode, ctx: PrintContext): string {
    return this.declarations.visitClassParameter(node, ctx);
  }

  visitParameterLists(node: CSTNode, ctx: PrintContext): string {
    return this.declarations.visitParameterLists(node, ctx);
  }

  visitParameterList(node: CSTNode, ctx: PrintContext): string {
    return this.declarations.visitParameterList(node, ctx);
  }

  visitParameter(node: CSTNode, ctx: PrintContext): string {
    return this.declarations.visitParameter(node, ctx);
  }

  visitTypeParameters(node: CSTNode, ctx: PrintContext): string {
    return this.declarations.visitTypeParameters(node, ctx);
  }

  visitTypeParameter(node: CSTNode, ctx: PrintContext): string {
    return this.declarations.visitTypeParameter(node, ctx);
  }

  visitExtendsClause(node: CSTNode, ctx: PrintContext): string {
    return this.declarations.visitExtendsClause(node, ctx);
  }

  visitClassBody(node: CSTNode, ctx: PrintContext): string {
    return this.declarations.visitClassBody(node, ctx);
  }

  visitClassMember(node: CSTNode, ctx: PrintContext): string {
    return this.declarations.visitClassMember(node, ctx);
  }

  // Type system
  visitType(node: CSTNode, ctx: PrintContext): string {
    return this.types.visitType(node, ctx);
  }

  visitMatchType(node: CSTNode, ctx: PrintContext): string {
    return this.types.visitMatchType(node, ctx);
  }

  visitMatchTypeCase(node: CSTNode, ctx: PrintContext): string {
    return this.types.visitMatchTypeCase(node, ctx);
  }

  visitUnionType(node: CSTNode, ctx: PrintContext): string {
    return this.types.visitUnionType(node, ctx);
  }

  visitIntersectionType(node: CSTNode, ctx: PrintContext): string {
    return this.types.visitIntersectionType(node, ctx);
  }

  visitBaseType(node: CSTNode, ctx: PrintContext): string {
    return this.types.visitBaseType(node, ctx);
  }

  visitTupleTypeOrParenthesized(node: CSTNode, ctx: PrintContext): string {
    return this.types.visitTupleTypeOrParenthesized(node, ctx);
  }

  visitSimpleType(node: CSTNode, ctx: PrintContext): string {
    return this.types.visitSimpleType(node, ctx);
  }

  visitTypeArgument(node: CSTNode, ctx: PrintContext): string {
    return this.types.visitTypeArgument(node, ctx);
  }

  visitTypeArgumentUnion(node: CSTNode, ctx: PrintContext): string {
    return this.types.visitTypeArgumentUnion(node, ctx);
  }

  visitTypeArgumentIntersection(node: CSTNode, ctx: PrintContext): string {
    return this.types.visitTypeArgumentIntersection(node, ctx);
  }

  visitTypeArgumentSimple(node: CSTNode, ctx: PrintContext): string {
    return this.types.visitTypeArgumentSimple(node, ctx);
  }

  visitTypeLambda(node: CSTNode, ctx: PrintContext): string {
    return this.types.visitTypeLambda(node, ctx);
  }

  visitTypeLambdaParameter(node: CSTNode, ctx: PrintContext): string {
    return this.types.visitTypeLambdaParameter(node, ctx);
  }

  visitDependentFunctionType(node: CSTNode, ctx: PrintContext): string {
    return this.types.visitDependentFunctionType(node, ctx);
  }

  visitDependentParameter(node: CSTNode, ctx: PrintContext): string {
    return this.types.visitDependentParameter(node, ctx);
  }

  // Expressions
  visitExpression(node: CSTNode, ctx: PrintContext): string {
    return this.expressions.visitExpression(node, ctx);
  }

  visitPostfixExpression(node: CSTNode, ctx: PrintContext): string {
    return this.expressions.visitPostfixExpression(node, ctx);
  }

  visitPrimaryExpression(node: CSTNode, ctx: PrintContext): string {
    return this.expressions.visitPrimaryExpression(node, ctx);
  }

  visitAssignmentStatement(node: CSTNode, ctx: PrintContext): string {
    return this.expressions.visitAssignmentStatement(node, ctx);
  }

  visitAssignmentOrInfixExpression(node: CSTNode, ctx: PrintContext): string {
    return this.expressions.visitAssignmentOrInfixExpression(node, ctx);
  }

  visitInfixOperator(node: ScalaCstNode, ctx: PrintContext): string {
    return this.expressions.visitInfixOperator(node, ctx);
  }

  visitLiteral(node: ScalaCstNode, ctx: PrintContext): string {
    return this.expressions.visitLiteral(node, ctx);
  }

  visitQualifiedIdentifier(node: ScalaCstNode, ctx: PrintContext): string {
    return this.expressions.visitQualifiedIdentifier(node, ctx);
  }

  visitNewExpression(node: CSTNode, ctx: PrintContext): string {
    return this.expressions.visitNewExpression(node, ctx);
  }

  visitIfExpression(node: CSTNode, ctx: PrintContext): string {
    return this.expressions.visitIfExpression(node, ctx);
  }

  visitWhileExpression(node: CSTNode, ctx: PrintContext): string {
    return this.expressions.visitWhileExpression(node, ctx);
  }

  visitTryExpression(node: CSTNode, ctx: PrintContext): string {
    return this.expressions.visitTryExpression(node, ctx);
  }

  visitForExpression(node: CSTNode, ctx: PrintContext): string {
    return this.expressions.visitForExpression(node, ctx);
  }

  visitGenerator(node: CSTNode, ctx: PrintContext): string {
    return this.expressions.visitGenerator(node, ctx);
  }

  visitCaseClause(node: CSTNode, ctx: PrintContext): string {
    return this.expressions.visitCaseClause(node, ctx);
  }

  visitBlockExpression(node: CSTNode, ctx: PrintContext): string {
    return this.expressions.visitBlockExpression(node, ctx);
  }

  visitPartialFunctionLiteral(node: CSTNode, ctx: PrintContext): string {
    return this.expressions.visitPartialFunctionLiteral(node, ctx);
  }

  // Statements
  visitBlockStatement(node: CSTNode, ctx: PrintContext): string {
    return this.statements.visitBlockStatement(node, ctx);
  }

  visitPattern(node: CSTNode, ctx: PrintContext): string {
    return this.statements.visitPattern(node, ctx);
  }

  // Scala 3 specific features
  visitEnumDefinition(node: CSTNode, ctx: PrintContext): string {
    return this.scala3.visitEnumDefinition(node, ctx);
  }

  visitEnumCase(node: CSTNode, ctx: PrintContext): string {
    return this.scala3.visitEnumCase(node, ctx);
  }

  visitExtensionDefinition(node: CSTNode, ctx: PrintContext): string {
    return this.scala3.visitExtensionDefinition(node, ctx);
  }

  visitExtensionMember(node: CSTNode, ctx: PrintContext): string {
    return this.scala3.visitExtensionMember(node, ctx);
  }

  visitGivenDefinition(node: CSTNode, ctx: PrintContext): string {
    return this.scala3.visitGivenDefinition(node, ctx);
  }

  visitQuoteExpression(node: CSTNode, ctx: PrintContext): string {
    return this.scala3.visitQuoteExpression(node, ctx);
  }

  visitSpliceExpression(node: CSTNode, ctx: PrintContext): string {
    return this.scala3.visitSpliceExpression(node, ctx);
  }

  visitPolymorphicFunctionLiteral(node: CSTNode, ctx: PrintContext): string {
    return this.scala3.visitPolymorphicFunctionLiteral(node, ctx);
  }

  visitPolymorphicFunctionType(node: CSTNode, ctx: PrintContext): string {
    return this.types.visitPolymorphicFunctionType(node, ctx);
  }

  visitPolymorphicTypeParameter(node: CSTNode, ctx: PrintContext): string {
    return this.types.visitPolymorphicTypeParameter(node, ctx);
  }

  visitContextFunctionType(node: CSTNode, ctx: PrintContext): string {
    return this.scala3.visitContextFunctionType(node, ctx);
  }
}
|
||||
@@ -1,640 +0,0 @@
|
||||
/**
|
||||
* Declaration visitor methods for class, object, trait, method, and other definitions
|
||||
*/
|
||||
import {
|
||||
formatStatement,
|
||||
getPrintWidth,
|
||||
getChildNodes,
|
||||
getFirstChild,
|
||||
createIndent,
|
||||
getNodeImage,
|
||||
} from "./utils";
|
||||
import type { PrintContext, CSTNode } from "./utils";
|
||||
|
||||
/**
 * Minimal visitor contract required by DeclarationVisitorMethods:
 * recursion into arbitrary nodes, modifier rendering, and indentation.
 */
export interface DeclarationVisitor {
  /** Visits a node and returns its formatted text. */
  visit(node: CSTNode, ctx: PrintContext): string;
  /** Renders a list of modifier nodes as a single space-joined string. */
  visitModifiers(modifiers: CSTNode[], ctx: PrintContext): string;
  /** Returns one level of indentation for the current context. */
  getIndentation(ctx: PrintContext): string;
}
|
||||
|
||||
export class DeclarationVisitorMethods {
|
||||
private visitor: DeclarationVisitor;
|
||||
|
||||
constructor(visitor: DeclarationVisitor) {
|
||||
this.visitor = visitor;
|
||||
}
|
||||
|
||||
visitClassDefinition(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "";
|
||||
|
||||
// Add class keyword (don't duplicate if already handled by modifiers)
|
||||
const classToken = getFirstChild(node, "Class");
|
||||
if (classToken) {
|
||||
result += getNodeImage(classToken) + " ";
|
||||
}
|
||||
|
||||
// Add class name
|
||||
const identifierToken = getFirstChild(node, "Identifier");
|
||||
if (identifierToken) {
|
||||
result += getNodeImage(identifierToken);
|
||||
}
|
||||
|
||||
const typeParameters = getFirstChild(node, "typeParameters");
|
||||
if (typeParameters) {
|
||||
result += this.visitor.visit(typeParameters, ctx);
|
||||
}
|
||||
|
||||
// Add constructor annotations
|
||||
const annotations = getChildNodes(node, "annotation");
|
||||
if (annotations.length > 0) {
|
||||
result +=
|
||||
" " +
|
||||
annotations
|
||||
.map((ann: CSTNode) => this.visitor.visit(ann, ctx))
|
||||
.join(" ");
|
||||
}
|
||||
|
||||
const classParameters = getFirstChild(node, "classParameters");
|
||||
if (classParameters) {
|
||||
result += this.visitor.visit(classParameters, ctx);
|
||||
}
|
||||
|
||||
const extendsClause = getFirstChild(node, "extendsClause");
|
||||
if (extendsClause) {
|
||||
result += " " + this.visitor.visit(extendsClause, ctx);
|
||||
}
|
||||
|
||||
const classBody = getFirstChild(node, "classBody");
|
||||
if (classBody) {
|
||||
result += " " + this.visitor.visit(classBody, ctx);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
visitObjectDefinition(node: CSTNode, ctx: PrintContext): string {
|
||||
const identifierToken = getFirstChild(node, "Identifier");
|
||||
let result =
|
||||
"object " + (identifierToken ? getNodeImage(identifierToken) : "");
|
||||
|
||||
const extendsClause = getFirstChild(node, "extendsClause");
|
||||
if (extendsClause) {
|
||||
result += " " + this.visitor.visit(extendsClause, ctx);
|
||||
}
|
||||
|
||||
const classBody = getFirstChild(node, "classBody");
|
||||
if (classBody) {
|
||||
result += " " + this.visitor.visit(classBody, ctx);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
visitTraitDefinition(node: CSTNode, ctx: PrintContext): string {
|
||||
const identifier = getFirstChild(node, "Identifier");
|
||||
let result = "trait " + (identifier ? getNodeImage(identifier) : "");
|
||||
|
||||
const typeParameters = getFirstChild(node, "typeParameters");
|
||||
if (typeParameters) {
|
||||
result += this.visitor.visit(typeParameters, ctx);
|
||||
}
|
||||
|
||||
const extendsClause = getFirstChild(node, "extendsClause");
|
||||
if (extendsClause) {
|
||||
result += " " + this.visitor.visit(extendsClause, ctx);
|
||||
}
|
||||
|
||||
const traitBody = getFirstChild(node, "classBody");
|
||||
if (traitBody) {
|
||||
result += " " + this.visitor.visit(traitBody, ctx);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
visitEnumDefinition(node: CSTNode, ctx: PrintContext): string {
|
||||
const identifierToken = getFirstChild(node, "Identifier");
|
||||
let result =
|
||||
"enum " + (identifierToken ? getNodeImage(identifierToken) : "");
|
||||
|
||||
const typeParameters = getFirstChild(node, "typeParameters");
|
||||
if (typeParameters) {
|
||||
result += this.visitor.visit(typeParameters, ctx);
|
||||
}
|
||||
|
||||
const classParameters = getFirstChild(node, "classParameters");
|
||||
if (classParameters) {
|
||||
result += this.visitor.visit(classParameters, ctx);
|
||||
}
|
||||
|
||||
const extendsClause = getFirstChild(node, "extendsClause");
|
||||
if (extendsClause) {
|
||||
result += " " + this.visitor.visit(extendsClause, ctx);
|
||||
}
|
||||
|
||||
result += " {\n";
|
||||
|
||||
const enumCases = getChildNodes(node, "enumCase");
|
||||
if (enumCases.length > 0) {
|
||||
const indent = this.visitor.getIndentation(ctx);
|
||||
const cases = enumCases.map(
|
||||
(c: CSTNode) => indent + this.visitor.visit(c, ctx),
|
||||
);
|
||||
result += cases.join("\n");
|
||||
}
|
||||
|
||||
result += "\n}";
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
visitEnumCase(node: CSTNode, ctx: PrintContext): string {
|
||||
const identifierToken = getFirstChild(node, "Identifier");
|
||||
let result =
|
||||
"case " + (identifierToken ? getNodeImage(identifierToken) : "");
|
||||
|
||||
const classParameters = getFirstChild(node, "classParameters");
|
||||
if (classParameters) {
|
||||
result += this.visitor.visit(classParameters, ctx);
|
||||
}
|
||||
|
||||
const extendsClause = getFirstChild(node, "extendsClause");
|
||||
if (extendsClause) {
|
||||
result += " " + this.visitor.visit(extendsClause, ctx);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
visitExtensionDefinition(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "extension";
|
||||
|
||||
const typeParameters = getFirstChild(node, "typeParameters");
|
||||
if (typeParameters) {
|
||||
result += this.visitor.visit(typeParameters, ctx);
|
||||
}
|
||||
|
||||
const identifierToken = getFirstChild(node, "Identifier");
|
||||
const typeNode = getFirstChild(node, "type");
|
||||
result +=
|
||||
" (" + (identifierToken ? getNodeImage(identifierToken) : "") + ": ";
|
||||
if (typeNode) {
|
||||
result += this.visitor.visit(typeNode, ctx);
|
||||
}
|
||||
result += ") {\n";
|
||||
|
||||
const extensionMembers = getChildNodes(node, "extensionMember");
|
||||
if (extensionMembers.length > 0) {
|
||||
const members = extensionMembers.map(
|
||||
(m: CSTNode) => " " + this.visitor.visit(m, ctx),
|
||||
);
|
||||
result += members.join("\n");
|
||||
}
|
||||
|
||||
result += "\n}";
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
visitExtensionMember(node: CSTNode, ctx: PrintContext): string {
|
||||
const modifierNodes = getChildNodes(node, "modifier");
|
||||
const modifiers = this.visitor.visitModifiers(modifierNodes, ctx);
|
||||
|
||||
const defDefinition = getFirstChild(node, "defDefinition");
|
||||
const definition = defDefinition
|
||||
? this.visitor.visit(defDefinition, ctx)
|
||||
: "";
|
||||
|
||||
return modifiers ? modifiers + " " + definition : definition;
|
||||
}
|
||||
|
||||
visitValDefinition(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "val ";
|
||||
|
||||
// Handle pattern or identifier
|
||||
const pattern = getFirstChild(node, "pattern");
|
||||
const identifierToken = getFirstChild(node, "Identifier");
|
||||
|
||||
if (pattern) {
|
||||
result += this.visitor.visit(pattern, ctx);
|
||||
} else if (identifierToken) {
|
||||
result += getNodeImage(identifierToken);
|
||||
}
|
||||
|
||||
const colonToken = getFirstChild(node, "Colon");
|
||||
if (colonToken) {
|
||||
const typeNode = getFirstChild(node, "type");
|
||||
if (typeNode) {
|
||||
result += ": " + this.visitor.visit(typeNode, ctx);
|
||||
}
|
||||
}
|
||||
|
||||
const equalsToken = getFirstChild(node, "Equals");
|
||||
if (equalsToken) {
|
||||
const expression = getFirstChild(node, "expression");
|
||||
if (expression) {
|
||||
result += " = " + this.visitor.visit(expression, ctx);
|
||||
}
|
||||
}
|
||||
|
||||
return formatStatement(result, ctx);
|
||||
}
|
||||
|
||||
visitVarDefinition(node: CSTNode, ctx: PrintContext): string {
|
||||
const identifierToken = getFirstChild(node, "Identifier");
|
||||
let result =
|
||||
"var " + (identifierToken ? getNodeImage(identifierToken) : "");
|
||||
|
||||
const colonToken = getFirstChild(node, "Colon");
|
||||
if (colonToken) {
|
||||
const typeNode = getFirstChild(node, "type");
|
||||
if (typeNode) {
|
||||
result += ": " + this.visitor.visit(typeNode, ctx);
|
||||
}
|
||||
}
|
||||
|
||||
const expression = getFirstChild(node, "expression");
|
||||
if (expression) {
|
||||
result += " = " + this.visitor.visit(expression, ctx);
|
||||
}
|
||||
|
||||
return formatStatement(result, ctx);
|
||||
}
|
||||
|
||||
visitDefDefinition(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "def ";
|
||||
|
||||
const identifierToken = getFirstChild(node, "Identifier");
|
||||
const thisToken = getFirstChild(node, "This");
|
||||
|
||||
if (identifierToken) {
|
||||
result += getNodeImage(identifierToken);
|
||||
} else if (thisToken) {
|
||||
result += "this";
|
||||
}
|
||||
|
||||
const typeParameters = getFirstChild(node, "typeParameters");
|
||||
if (typeParameters) {
|
||||
result += this.visitor.visit(typeParameters, ctx);
|
||||
}
|
||||
|
||||
const parameterLists = getFirstChild(node, "parameterLists");
|
||||
if (parameterLists) {
|
||||
result += this.visitor.visit(parameterLists, ctx);
|
||||
}
|
||||
|
||||
const colonToken = getFirstChild(node, "Colon");
|
||||
if (colonToken) {
|
||||
const typeNode = getFirstChild(node, "type");
|
||||
if (typeNode) {
|
||||
result += ": " + this.visitor.visit(typeNode, ctx);
|
||||
}
|
||||
}
|
||||
|
||||
const equalsToken = getFirstChild(node, "Equals");
|
||||
if (equalsToken) {
|
||||
const expression = getFirstChild(node, "expression");
|
||||
if (expression) {
|
||||
result += " = " + this.visitor.visit(expression, ctx);
|
||||
}
|
||||
return formatStatement(result, ctx);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
visitGivenDefinition(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "given";
|
||||
|
||||
const identifierToken = getFirstChild(node, "Identifier");
|
||||
if (identifierToken) {
|
||||
// Named given with potential parameters: given name[T](using ord: Type): Type
|
||||
result += " " + getNodeImage(identifierToken);
|
||||
|
||||
const typeParameters = getFirstChild(node, "typeParameters");
|
||||
if (typeParameters) {
|
||||
result += this.visitor.visit(typeParameters, ctx);
|
||||
}
|
||||
|
||||
const parameterLists = getFirstChild(node, "parameterLists");
|
||||
if (parameterLists) {
|
||||
result += this.visitor.visit(parameterLists, ctx);
|
||||
}
|
||||
|
||||
const typeNode = getFirstChild(node, "type");
|
||||
if (typeNode) {
|
||||
result += ": " + this.visitor.visit(typeNode, ctx);
|
||||
}
|
||||
} else {
|
||||
// Anonymous given: given Type = expression
|
||||
const typeNode = getFirstChild(node, "type");
|
||||
if (typeNode) {
|
||||
result += " " + this.visitor.visit(typeNode, ctx);
|
||||
}
|
||||
}
|
||||
|
||||
const equalsToken = getFirstChild(node, "Equals");
|
||||
if (equalsToken) {
|
||||
const expression = getFirstChild(node, "expression");
|
||||
if (expression) {
|
||||
result += " = " + this.visitor.visit(expression, ctx);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
 * Renders a `type` alias, including `opaque type` and parameterized
 * aliases: [opaque] type Name[T] [= AliasedType].
 */
visitTypeDefinition(node: CSTNode, ctx: PrintContext): string {
  const opaquePrefix = getFirstChild(node, "Opaque") ? "opaque " : "";
  const nameToken = getFirstChild(node, "Identifier");
  let out = opaquePrefix + "type " + (nameToken ? getNodeImage(nameToken) : "");

  const typeParams = getFirstChild(node, "typeParameters");
  if (typeParams) {
    out += this.visitor.visit(typeParams, ctx);
  }

  const aliased = getFirstChild(node, "type");
  if (aliased) {
    out += " = " + this.visitor.visit(aliased, ctx);
  }

  return out;
}
|
||||
|
||||
/**
 * Renders an auxiliary constructor: def this(...) = expression.
 * Note the CST uses "parameterList" (singular) for these, unlike
 * primary definitions which use "parameterLists".
 */
visitAuxiliaryConstructor(node: CSTNode, ctx: PrintContext): string {
  const segments: string[] = ["def this"];

  const paramList = getFirstChild(node, "parameterList");
  if (paramList) {
    segments.push(this.visitor.visit(paramList, ctx));
  }

  const body = getFirstChild(node, "expression");
  if (body) {
    segments.push(" = " + this.visitor.visit(body, ctx));
  }

  return segments.join("");
}
|
||||
|
||||
/**
 * Renders a primary-constructor parameter list. A single short parameter
 * stays on one line; everything else is broken one-per-line without a
 * trailing comma.
 */
visitClassParameters(node: CSTNode, ctx: PrintContext): string {
  const parameterNodes = getChildNodes(node, "classParameter");
  if (parameterNodes.length === 0) {
    return "()";
  }

  const rendered = parameterNodes.map((p: CSTNode) => this.visitor.visit(p, ctx));
  const width = getPrintWidth(ctx);

  // The 0.6 * printWidth (capped at 40) threshold keeps constructor
  // headers compact even under large printWidth settings.
  const oneLine = `(${rendered.join(", ")})`;
  if (
    parameterNodes.length === 1 &&
    oneLine.length <= Math.min(width * 0.6, 40)
  ) {
    return oneLine;
  }

  // Multi-line form: one parameter per line, no trailing comma.
  const pad = this.visitor.getIndentation(ctx);
  return "(\n" + rendered.map((p: string) => pad + p).join(",\n") + "\n)";
}
|
||||
|
||||
/**
 * Renders one constructor parameter:
 * [modifiers] [val|var] name: Type [= default].
 */
visitClassParameter(node: CSTNode, ctx: PrintContext): string {
  let out = "";

  const modifierNodes = getChildNodes(node, "modifier");
  if (modifierNodes.length > 0) {
    out += this.visitor.visitModifiers(modifierNodes, ctx) + " ";
  }

  // `val` wins over `var` if (unexpectedly) both tokens are present.
  if (getFirstChild(node, "Val")) {
    out += "val ";
  } else if (getFirstChild(node, "Var")) {
    out += "var ";
  }

  const nameToken = getFirstChild(node, "Identifier");
  if (nameToken) {
    out += getNodeImage(nameToken);
  }
  out += ": ";

  const declaredType = getFirstChild(node, "type");
  if (declaredType) {
    out += this.visitor.visit(declaredType, ctx);
  }

  // Optional default value.
  if (getFirstChild(node, "Equals")) {
    const defaultValue = getFirstChild(node, "expression");
    if (defaultValue) {
      out += " = " + this.visitor.visit(defaultValue, ctx);
    }
  }

  return out;
}
|
||||
|
||||
/**
 * Concatenates consecutive (curried) parameter lists: (a: A)(using b: B).
 */
visitParameterLists(node: CSTNode, ctx: PrintContext): string {
  let out = "";
  for (const list of getChildNodes(node, "parameterList")) {
    out += this.visitor.visit(list, ctx);
  }
  return out;
}
|
||||
|
||||
/**
 * Renders a single parameter list on one line: (a: A, b: B).
 * An empty list prints as "()".
 */
visitParameterList(node: CSTNode, ctx: PrintContext): string {
  const parameterNodes = getChildNodes(node, "parameter");
  if (parameterNodes.length === 0) {
    return "()";
  }

  const rendered = parameterNodes.map((p: CSTNode) => this.visitor.visit(p, ctx));
  return `(${rendered.join(", ")})`;
}
|
||||
|
||||
/**
 * Renders one method parameter: [using|implicit] name: Type [= default].
 */
visitParameter(node: CSTNode, ctx: PrintContext): string {
  let out = "";

  // `using` takes precedence over `implicit` when both tokens exist.
  if (getFirstChild(node, "Using")) {
    out += "using ";
  } else if (getFirstChild(node, "Implicit")) {
    out += "implicit ";
  }

  const nameToken = getFirstChild(node, "Identifier");
  if (nameToken) {
    out += getNodeImage(nameToken);
  }
  out += ": ";

  const declaredType = getFirstChild(node, "type");
  if (declaredType) {
    out += this.visitor.visit(declaredType, ctx);
  }

  // Optional default value.
  if (getFirstChild(node, "Equals")) {
    const defaultValue = getFirstChild(node, "expression");
    if (defaultValue) {
      out += " = " + this.visitor.visit(defaultValue, ctx);
    }
  }

  return out;
}
|
||||
|
||||
/**
 * Renders a type-parameter clause: [T, +U]. An empty clause renders as
 * the empty string (not "[]") so call sites can append unconditionally.
 */
visitTypeParameters(node: CSTNode, ctx: PrintContext): string {
  const parameterNodes = getChildNodes(node, "typeParameter");
  if (parameterNodes.length === 0) {
    return "";
  }

  const rendered = parameterNodes.map((p: CSTNode) => this.visitor.visit(p, ctx));
  return `[${rendered.join(", ")}]`;
}
|
||||
|
||||
/**
 * Renders a type parameter with optional variance (+/-) and bounds:
 * +T <: Upper >: Lower.
 */
visitTypeParameter(node: CSTNode, ctx: PrintContext): string {
  let out = "";

  // Variance annotation, if any.
  if (getFirstChild(node, "Plus")) {
    out += "+";
  } else if (getFirstChild(node, "Minus")) {
    out += "-";
  }

  const nameToken = getFirstChild(node, "Identifier");
  if (nameToken) {
    out += getNodeImage(nameToken);
  }

  const upperToken = getFirstChild(node, "SubtypeOf");
  const lowerToken = getFirstChild(node, "SupertypeOf");
  const boundTypes = getChildNodes(node, "type");

  if (upperToken && boundTypes.length > 0) {
    out += " <: " + this.visitor.visit(boundTypes[0], ctx);
  }
  // With both bounds present the lower bound is the second type node;
  // with only `>:` present it is the first (and only) one.
  if (lowerToken && boundTypes.length > 1) {
    out += " >: " + this.visitor.visit(boundTypes[1], ctx);
  } else if (lowerToken && boundTypes.length === 1 && !upperToken) {
    out += " >: " + this.visitor.visit(boundTypes[0], ctx);
  }

  return out;
}
|
||||
|
||||
/**
 * Renders `extends Parent with Mixin1 with Mixin2`; empty when the
 * clause has no parent types.
 */
visitExtendsClause(node: CSTNode, ctx: PrintContext): string {
  const parents = getChildNodes(node, "type");
  if (parents.length === 0) {
    return "";
  }

  let out = "extends " + this.visitor.visit(parents[0], ctx);

  // Remaining types are mixins joined by `with` (only when a With token
  // actually appears in the CST).
  if (getFirstChild(node, "With") && parents.length > 1) {
    for (const mixin of parents.slice(1)) {
      out += " with " + this.visitor.visit(mixin, ctx);
    }
  }

  return out;
}
|
||||
|
||||
/**
 * Renders the braced member list of a class/object/trait, printing each
 * member one indentation level deeper than the braces.
 */
visitClassBody(node: CSTNode, ctx: PrintContext): string {
  const memberNodes = getChildNodes(node, "classMember");
  if (memberNodes.length === 0) {
    return "{}";
  }

  // Members are visited with an increased indent level so nested bodies
  // indent correctly.
  const innerCtx = {
    ...ctx,
    indentLevel: ctx.indentLevel + 1,
  };

  const pad = createIndent(1, ctx);
  const body = memberNodes
    .map((member: CSTNode) => pad + this.visitor.visit(member, innerCtx))
    .join("\n");

  return "{\n" + body + "\n}";
}
|
||||
|
||||
/**
 * Dispatches a class member to its specific visitor. A member is exactly
 * one of these alternatives; the first child found wins, probed in the
 * same order as the original if-chain (mirrors the grammar).
 */
visitClassMember(node: CSTNode, ctx: PrintContext): string {
  const memberKinds = [
    "defDefinition",
    "auxiliaryConstructor",
    "valDefinition",
    "varDefinition",
    "classDefinition",
    "objectDefinition",
    "traitDefinition",
    "typeDefinition",
    "definition",
  ];

  for (const kind of memberKinds) {
    const child = getFirstChild(node, kind);
    if (child) {
      return this.visitor.visit(child, ctx);
    }
  }

  // Unknown member shape: print nothing rather than crash.
  return "";
}
|
||||
}
|
||||
@@ -1,836 +0,0 @@
|
||||
/**
|
||||
* Expression visitor methods for handling various expression types
|
||||
*/
|
||||
import {
|
||||
formatStringLiteral,
|
||||
getChildNodes,
|
||||
getFirstChild,
|
||||
createIndent,
|
||||
getNodeImage,
|
||||
} from "./utils";
|
||||
import type { PrintContext, CSTNode } from "./utils";
|
||||
|
||||
/**
 * Minimal visitor contract the expression methods dispatch through:
 * resolves any CST node to its printed string form. The concrete
 * implementation lives in the main visitor class that owns this module.
 */
export interface ExpressionVisitor {
  visit(node: CSTNode, ctx: PrintContext): string;
}
|
||||
|
||||
/**
 * Expression-level printing methods (lambdas, literals, control flow,
 * blocks, ...) extracted from the main visitor. Each method delegates
 * child-node printing back through the injected visitor.
 */
export class ExpressionVisitorMethods {
  // Dispatcher used to recursively print child nodes.
  private visitor: ExpressionVisitor;

  constructor(visitor: ExpressionVisitor) {
    this.visitor = visitor;
  }
|
||||
|
||||
/**
 * Renders a top-level expression node. Alternatives are probed in a
 * fixed order (partial-function literal, paren-list lambda, block
 * lambda, polymorphic function literal, simple lambda,
 * assignmentOrInfixExpression, legacy postfix chain); the first match
 * wins. NOTE(review): the checks are heuristic — e.g. the block-lambda
 * branch fires whenever LeftBrace + Identifier + Arrow coexist — so
 * reordering these branches changes output.
 */
visitExpression(node: CSTNode, ctx: PrintContext): string {
  // Handle PartialFunction literals: { case ... }
  const partialFunctionLiteral = getFirstChild(
    node,
    "partialFunctionLiteral",
  );
  if (partialFunctionLiteral) {
    return this.visitor.visit(partialFunctionLiteral, ctx);
  }

  // Handle lambda expressions with parameter list: (x: Int, y: Int) => x + y
  const parameterList = getFirstChild(node, "parameterList");
  const arrow = getChildNodes(node, "Arrow");
  if (parameterList && arrow.length > 0) {
    const expression = getFirstChild(node, "expression");
    return (
      this.visitor.visit(parameterList, ctx) +
      " => " +
      (expression ? this.visitor.visit(expression, ctx) : "")
    );
  }

  // Handle block lambda expressions: { x => ... }
  const leftBrace = getChildNodes(node, "LeftBrace");
  const identifier = getChildNodes(node, "Identifier");
  const arrowNodes = getChildNodes(node, "Arrow");

  if (
    leftBrace.length > 0 &&
    identifier.length > 0 &&
    arrowNodes.length > 0
  ) {
    let result = "{ " + getNodeImage(identifier[0]) + " =>";

    const statements: string[] = [];

    // Create nested context for lambda body
    const nestedCtx = {
      ...ctx,
      indentLevel: ctx.indentLevel + 1,
    };

    // Add statements (val/var/def definitions)
    const blockStatements = getChildNodes(node, "blockStatement");
    if (blockStatements.length > 0) {
      statements.push(
        ...blockStatements.map((stmt: CSTNode) =>
          this.visitor.visit(stmt, nestedCtx),
        ),
      );
    }

    // Add final expression
    const finalExpression = getFirstChild(node, "expression");
    if (finalExpression) {
      statements.push(this.visitor.visit(finalExpression, nestedCtx));
    }

    if (statements.length === 0) {
      result += " }";
    } else if (statements.length === 1) {
      // Single expression - keep on same line if short
      // (50 chars is an arbitrary compactness threshold, independent of
      // the configured printWidth)
      const stmt = statements[0];
      if (stmt.length < 50) {
        result += " " + stmt + " }";
      } else {
        const indent = createIndent(1, ctx);
        result += "\n" + indent + stmt + "\n}";
      }
    } else {
      // Multiple statements - use multiple lines
      const indent = createIndent(1, ctx);
      const indentedStmts = statements.map((stmt) => indent + stmt);
      result += "\n" + indentedStmts.join("\n") + "\n}";
    }

    return result;
  }

  // Handle polymorphic function literal: [T] => (x: T) => x
  const polymorphicFunctionLiteral = getFirstChild(
    node,
    "polymorphicFunctionLiteral",
  );
  if (polymorphicFunctionLiteral) {
    return this.visitor.visit(polymorphicFunctionLiteral, ctx);
  }

  // Handle simple lambda expressions: x => x * 2
  const simpleIdentifier = getChildNodes(node, "Identifier");
  const simpleArrow = getChildNodes(node, "Arrow");
  if (simpleIdentifier.length > 0 && simpleArrow.length > 0) {
    const expression = getFirstChild(node, "expression");
    return (
      getNodeImage(simpleIdentifier[0]) +
      " => " +
      (expression ? this.visitor.visit(expression, ctx) : "")
    );
  }

  // Handle assignmentOrInfixExpression
  const assignmentOrInfixExpression = getFirstChild(
    node,
    "assignmentOrInfixExpression",
  );
  if (assignmentOrInfixExpression) {
    return this.visitor.visit(assignmentOrInfixExpression, ctx);
  }

  // Handle regular expressions (fallback for older structure)
  const postfixExpressions = getChildNodes(node, "postfixExpression");
  if (postfixExpressions.length > 0) {
    let result = this.visitor.visit(postfixExpressions[0], ctx);

    const infixOperators = getChildNodes(node, "infixOperator");
    if (infixOperators.length > 0) {
      // Operands interleave with operators: operand[0] op[0] operand[1] ...
      for (let i = 0; i < infixOperators.length; i++) {
        result +=
          " " +
          this.visitor.visit(infixOperators[i], ctx) +
          " " +
          (postfixExpressions[i + 1]
            ? this.visitor.visit(postfixExpressions[i + 1], ctx)
            : "");
      }
    }

    return result;
  }

  return "";
}
|
||||
|
||||
/**
 * Renders a postfix chain: primary expression followed by member access
 * / method calls, type arguments, `match` blocks, call-without-dot and
 * trailing block lambdas. NOTE(review): the flat CST does not preserve
 * which expressions belong to which argument list, so argument grouping
 * uses a hard-coded `i * 10` window — calls with >10 arguments or
 * multiple dense argument lists may be grouped wrongly; confirm against
 * the parser before changing.
 */
visitPostfixExpression(node: CSTNode, ctx: PrintContext): string {
  const primaryExpression = getFirstChild(node, "primaryExpression");
  let result = primaryExpression
    ? this.visitor.visit(primaryExpression, ctx)
    : "";

  // Handle method calls and member access
  const dots = getChildNodes(node, "Dot");
  if (dots.length > 0) {
    const identifiers = getChildNodes(node, "Identifier");

    for (let i = 0; i < dots.length; i++) {
      result += ".";

      // Handle member access or method call
      // Identifiers after the first one correspond to members after dots
      if (identifiers.length > i) {
        result += getNodeImage(identifiers[i]);
      }

      // Add arguments if this is a method call
      const leftParens = getChildNodes(node, "LeftParen");
      if (leftParens.length > i) {
        result += "(";

        // Find expressions for this argument list
        const startIdx = i * 10; // Rough heuristic for argument grouping
        const expressions = getChildNodes(node, "expression");
        const relevantExpressions = expressions.slice(
          startIdx,
          startIdx + 10,
        );

        if (relevantExpressions.length > 0) {
          const args = relevantExpressions.map((e: CSTNode) =>
            this.visitor.visit(e, ctx),
          );
          result += args.join(", ");
        }

        result += ")";
      }
    }
  }

  // Handle type arguments
  const leftBrackets = getChildNodes(node, "LeftBracket");
  if (leftBrackets.length > 0) {
    result += "[";
    const types = getChildNodes(node, "type");
    if (types.length > 0) {
      const typeStrings = types.map((t: CSTNode) =>
        this.visitor.visit(t, ctx),
      );
      result += typeStrings.join(", ");
    }
    result += "]";
  }

  // Handle match expressions
  const matchTokens = getChildNodes(node, "Match");
  if (matchTokens.length > 0) {
    result += " match {\n";
    const caseClauses = getChildNodes(node, "caseClause");
    if (caseClauses.length > 0) {
      const cases = caseClauses.map(
        (c: CSTNode) => " " + this.visitor.visit(c, ctx),
      );
      result += cases.join("\n");
      result += "\n";
    }
    result += "}";
  }

  // Handle method application without dot
  const methodLeftParens = getChildNodes(node, "LeftParen");
  const methodDots = getChildNodes(node, "Dot");
  if (methodLeftParens.length > 0 && methodDots.length === 0) {
    result += "(";
    const methodExpressions = getChildNodes(node, "expression");
    if (methodExpressions.length > 0) {
      const args = methodExpressions.map((e: CSTNode) =>
        this.visitor.visit(e, ctx),
      );
      result += args.join(", ");
    }
    result += ")";
  }

  // Handle block lambda expressions: method { param => ... }
  const leftBrace = getChildNodes(node, "LeftBrace");
  const arrowNodes = getChildNodes(node, "Arrow");
  const identifiers = getChildNodes(node, "Identifier");

  if (
    leftBrace.length > 0 &&
    arrowNodes.length > 0 &&
    identifiers.length > 1
  ) {
    // The lambda parameter is the second identifier (first is method name)
    const lambdaParam = getNodeImage(identifiers[1]);
    result += " { " + lambdaParam + " =>";

    // Create nested context for lambda body
    const nestedCtx = {
      ...ctx,
      indentLevel: ctx.indentLevel + 1,
    };

    // Process block statements
    const blockStatements = getChildNodes(node, "blockStatement");
    const statements: string[] = [];

    for (const stmt of blockStatements) {
      statements.push(this.visitor.visit(stmt, nestedCtx));
    }

    if (statements.length === 0) {
      result += " }";
    } else if (statements.length === 1) {
      // Single statement - keep on same line if short
      // (50 chars is an arbitrary compactness threshold)
      const stmt = statements[0];
      if (stmt.length < 50) {
        result += " " + stmt + " }";
      } else {
        const indent = createIndent(1, ctx);
        result += "\n" + indent + stmt + "\n}";
      }
    } else {
      // Multiple statements - use multiple lines
      const indent = createIndent(1, ctx);
      const indentedStmts = statements.map((stmt) => indent + stmt);
      result += "\n" + indentedStmts.join("\n") + "\n}";
    }
  }

  return result;
}
|
||||
|
||||
/**
 * Renders a primary expression by probing alternatives in grammar order;
 * the first match wins. Order is significant and preserved exactly.
 */
visitPrimaryExpression(node: CSTNode, ctx: PrintContext): string {
  // Visits the named child if present, otherwise signals "not this kind".
  const delegate = (kind: string): string | null => {
    const child = getFirstChild(node, kind);
    return child ? this.visitor.visit(child, ctx) : null;
  };

  const literalText = delegate("literal");
  if (literalText !== null) {
    return literalText;
  }

  const idToken = getFirstChild(node, "Identifier");
  if (idToken) {
    return getNodeImage(idToken);
  }

  if (getChildNodes(node, "This").length > 0) {
    return "this";
  }

  const partialFn = delegate("partialFunctionLiteral");
  if (partialFn !== null) {
    return partialFn;
  }

  const newExpr = delegate("newExpression");
  if (newExpr !== null) {
    return newExpr;
  }

  const forExpr = delegate("forExpression");
  if (forExpr !== null) {
    return forExpr;
  }

  const ifExpr = delegate("ifExpression");
  if (ifExpr !== null) {
    return ifExpr;
  }

  const whileExpr = delegate("whileExpression");
  if (whileExpr !== null) {
    return whileExpr;
  }

  const tryExpr = delegate("tryExpression");
  if (tryExpr !== null) {
    return tryExpr;
  }

  if (getChildNodes(node, "Exclamation").length > 0) {
    // Unary negation: !expr (bare "!" when the operand is missing).
    const operand = getFirstChild(node, "postfixExpression");
    return operand ? "!" + this.visitor.visit(operand, ctx) : "!";
  }

  if (getChildNodes(node, "BitwiseTilde").length > 0) {
    // Bitwise complement: ~expr.
    const operand = getFirstChild(node, "postfixExpression");
    return "~" + (operand ? this.visitor.visit(operand, ctx) : "");
  }

  if (getChildNodes(node, "LeftParen").length > 0) {
    // Parenthesized expression; newer CSTs nest an
    // assignmentOrInfixExpression where older ones used a plain expression.
    const inner =
      getFirstChild(node, "expression") ||
      getFirstChild(node, "assignmentOrInfixExpression");
    return "(" + (inner ? this.visitor.visit(inner, ctx) : "") + ")";
  }

  const blockExpr = delegate("blockExpression");
  if (blockExpr !== null) {
    return blockExpr;
  }

  const quoteExpr = delegate("quoteExpression");
  if (quoteExpr !== null) {
    return quoteExpr;
  }

  const spliceExpr = delegate("spliceExpression");
  if (spliceExpr !== null) {
    return spliceExpr;
  }

  return "";
}
|
||||
|
||||
/**
 * Renders `lhs [assignOp rhs]` and/or an infix chain
 * `a op b op c`. Assignment covers =, +=, -=, *=, /=, %= and sbt's :=
 * (which also serves named arguments).
 */
visitAssignmentOrInfixExpression(node: CSTNode, ctx: PrintContext): string {
  const operands = getChildNodes(node, "postfixExpression");
  let out = operands.length > 0 ? this.visitor.visit(operands[0], ctx) : "";

  // First assignment-like token found wins, in this fixed priority order.
  const assignmentKinds = [
    "Equals",
    "PlusEquals",
    "MinusEquals",
    "StarEquals",
    "SlashEquals",
    "PercentEquals",
    "SbtAssign",
  ];
  let assignToken: CSTNode | undefined;
  for (const kind of assignmentKinds) {
    assignToken = getChildNodes(node, kind)[0];
    if (assignToken) {
      break;
    }
  }

  if (assignToken) {
    out += " " + getNodeImage(assignToken) + " ";
    const rhs = getChildNodes(node, "expression");
    if (rhs.length > 0) {
      out += this.visitor.visit(rhs[0], ctx);
    }
  }

  // Infix chain: operands interleave with operators.
  const infixOps = getChildNodes(node, "infixOperator");
  for (let i = 0; i < infixOps.length; i++) {
    out += " " + this.visitor.visit(infixOps[i], ctx) + " ";
    if (operands.length > i + 1) {
      out += this.visitor.visit(operands[i + 1], ctx);
    }
  }

  return out;
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
/**
 * Prints an infix operator token. Known operator token types are probed
 * in a fixed order; an Identifier child acts as the fallback so custom
 * (symbolic or alphanumeric) operators still print.
 */
visitInfixOperator(node: CSTNode, _ctx: PrintContext): string {
  const operatorKinds = [
    "Plus",
    "Minus",
    "Star",
    "Slash",
    "Percent",
    "DoubleStar",
    "LeftShift",
    "RightShift",
    "UnsignedRightShift",
    "BitwiseAnd",
    "BitwiseOr",
    "BitwiseXor",
    "EqualsEquals",
    "NotEquals",
    "LessThan",
    "LessThanOrEqual",
    "GreaterThan",
    "GreaterThanOrEqual",
    "LogicalAnd",
    "LogicalOr",
    "DoublePercent",
    "Ask",
    "To",
    "Until",
    "PrependOp",
    "AppendOp",
    "ConcatOp",
    "RightArrow",
  ];

  for (const kind of operatorKinds) {
    const [token] = getChildNodes(node, kind);
    if (token) {
      return getNodeImage(token);
    }
  }

  // Fallback to identifier for custom operators.
  const [identifier] = getChildNodes(node, "Identifier");
  return identifier ? getNodeImage(identifier) : "";
}
|
||||
|
||||
/**
 * Prints a literal token. String-ish tokens (those starting with a
 * quote) are routed through formatStringLiteral so the configured quote
 * style is applied; every other literal prints verbatim.
 */
visitLiteral(node: CSTNode, ctx: PrintContext): string {
  const literalKinds = [
    "StringLiteral",
    "InterpolatedStringLiteral",
    "IntegerLiteral",
    "NumberLiteral",
    "FloatLiteral",
    "BooleanLiteral",
    "True",
    "False",
    "CharLiteral",
    "NullLiteral",
    "Null",
    "ScientificNumber",
  ];

  for (const kind of literalKinds) {
    const [token] = getChildNodes(node, kind);
    if (!token) {
      continue;
    }
    const image = getNodeImage(token);
    if (image.startsWith('"') || image.startsWith("'")) {
      return formatStringLiteral(image, ctx);
    }
    return image;
  }

  return "";
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
/**
 * Prints a dotted path like a.b.c. Segments after a dot are normally
 * identifiers, but a trailing `.type` ends with the Type keyword token
 * instead of an Identifier.
 */
visitQualifiedIdentifier(node: CSTNode, _ctx: PrintContext): string {
  const identifiers = getChildNodes(node, "Identifier");
  if (identifiers.length === 0) {
    return "";
  }

  let out = getNodeImage(identifiers[0]);

  const dots = getChildNodes(node, "Dot");
  if (dots.length > 0) {
    const typeKeywords = getChildNodes(node, "Type");

    for (let i = 0; i < dots.length; i++) {
      out += ".";
      if (i + 1 < identifiers.length) {
        out += getNodeImage(identifiers[i + 1]);
      } else if (typeKeywords.length > 0) {
        // Singleton-type selection: path.type
        out += getNodeImage(typeKeywords[0]);
      }
    }
  }

  return out;
}
|
||||
|
||||
/**
 * Renders `new Type(arg1, arg2)`; the argument list is emitted only when
 * the CST carries a LeftParen token.
 */
visitNewExpression(node: CSTNode, ctx: PrintContext): string {
  const typeNode = getFirstChild(node, "type");
  let out = "new " + (typeNode ? this.visitor.visit(typeNode, ctx) : "");

  if (getChildNodes(node, "LeftParen").length > 0) {
    const argNodes = getChildNodes(node, "expression");
    const args = argNodes.map((arg: CSTNode) => this.visitor.visit(arg, ctx));
    out += "(" + args.join(", ") + ")";
  }

  return out;
}
|
||||
|
||||
/**
 * Renders `if (cond) then [else alt]` (Scala 2 paren style). Expression
 * children are ordered: [0]=condition, [1]=then branch, [2]=else branch.
 * Degrades to a bare "if" when the node is incomplete.
 */
visitIfExpression(node: CSTNode, ctx: PrintContext): string {
  const branches = getChildNodes(node, "expression");
  if (branches.length < 2) {
    return "if";
  }

  const condition = this.visitor.visit(branches[0], ctx);
  const thenPart = this.visitor.visit(branches[1], ctx);
  let out = `if (${condition}) ${thenPart}`;

  if (getChildNodes(node, "Else").length > 0 && branches.length > 2) {
    out += " else " + this.visitor.visit(branches[2], ctx);
  }

  return out;
}
|
||||
|
||||
/**
 * Renders `while (cond) body`. Expression children: [0]=condition,
 * [1]=body. Degrades to a bare "while" when the node is incomplete.
 */
visitWhileExpression(node: CSTNode, ctx: PrintContext): string {
  const parts = getChildNodes(node, "expression");
  if (parts.length < 2) {
    return "while";
  }

  const condition = this.visitor.visit(parts[0], ctx);
  const body = this.visitor.visit(parts[1], ctx);
  return `while (${condition}) ${body}`;
}
|
||||
|
||||
/**
 * Renders `try expr [catch { case ... }] [finally expr]`.
 *
 * Expression children: [0] is the try body; [1] (when present) is the
 * finally body. Catch handlers live in caseClause children, not in the
 * expression list, so the finally index is 1 whether or not a catch
 * block exists — the original `catchTokens.length > 0 ? 1 : 1` ternary
 * was dead code (both branches identical) and is replaced by the
 * constant it always produced.
 */
visitTryExpression(node: CSTNode, ctx: PrintContext): string {
  const expressions = getChildNodes(node, "expression");
  if (expressions.length === 0) {
    return "try";
  }

  let result = "try ";
  result += this.visitor.visit(expressions[0], ctx);

  const catchTokens = getChildNodes(node, "Catch");
  if (catchTokens.length > 0) {
    result += " catch {\n";
    const caseClauses = getChildNodes(node, "caseClause");
    if (caseClauses.length > 0) {
      const cases = caseClauses.map(
        (c: CSTNode) => " " + this.visitor.visit(c, ctx),
      );
      result += cases.join("\n");
    }
    result += "\n}";
  }

  const finallyTokens = getChildNodes(node, "Finally");
  if (finallyTokens.length > 0) {
    result += " finally ";
    // The finally expression always follows the try body at index 1;
    // catch bodies are stored as caseClause nodes and never shift it.
    const finallyExprIndex = 1;
    if (expressions.length > finallyExprIndex) {
      result += this.visitor.visit(expressions[finallyExprIndex], ctx);
    }
  }

  return result;
}
|
||||
|
||||
/**
 * Renders a for-comprehension in either paren form
 * `for (g1; g2) body` or brace form `for {\n g1\n g2\n} body`,
 * with an optional `yield` before the body.
 */
visitForExpression(node: CSTNode, ctx: PrintContext): string {
  let out = "for ";

  const generators = getChildNodes(node, "generator");
  if (getChildNodes(node, "LeftParen").length > 0) {
    // Paren form: generators on one line, separated by semicolons.
    const rendered = generators.map((g: CSTNode) => this.visitor.visit(g, ctx));
    out += "(" + rendered.join("; ") + ")";
  } else if (getChildNodes(node, "LeftBrace").length > 0) {
    // Brace form: one generator per line.
    const rendered = generators.map(
      (g: CSTNode) => " " + this.visitor.visit(g, ctx),
    );
    out += "{\n" + rendered.join("\n") + "\n}";
  }

  out += getChildNodes(node, "Yield").length > 0 ? " yield " : " ";

  const bodyExpressions = getChildNodes(node, "expression");
  if (bodyExpressions.length > 0) {
    out += this.visitor.visit(bodyExpressions[0], ctx);
  }

  return out;
}
|
||||
|
||||
/**
 * Renders one comprehension generator: `pattern <- source [if guard]...`.
 * expressions[0] is the source; each subsequent expression pairs with an
 * If token as a guard.
 */
visitGenerator(node: CSTNode, ctx: PrintContext): string {
  const patterns = getChildNodes(node, "pattern");
  const expressions = getChildNodes(node, "expression");

  if (patterns.length === 0 || expressions.length === 0) {
    return "";
  }

  let out =
    this.visitor.visit(patterns[0], ctx) +
    " <- " +
    this.visitor.visit(expressions[0], ctx);

  const guardTokens = getChildNodes(node, "If");
  for (let i = 0; i < guardTokens.length; i++) {
    if (expressions.length > i + 1) {
      out += " if " + this.visitor.visit(expressions[i + 1], ctx);
    }
  }

  return out;
}
|
||||
|
||||
/**
 * Renders `case pattern [if guard] => body`. When a guard is present it
 * consumes expressions[0] and the body shifts to expressions[1].
 */
visitCaseClause(node: CSTNode, ctx: PrintContext): string {
  const patterns = getChildNodes(node, "pattern");
  if (patterns.length === 0) {
    return "case";
  }

  const expressions = getChildNodes(node, "expression");
  const hasGuard = getChildNodes(node, "If").length > 0;

  let out = "case " + this.visitor.visit(patterns[0], ctx);

  if (hasGuard && expressions.length > 0) {
    out += " if " + this.visitor.visit(expressions[0], ctx);
  }

  const bodyIndex = hasGuard ? 1 : 0;
  if (expressions.length > bodyIndex) {
    out += " => " + this.visitor.visit(expressions[bodyIndex], ctx);
  }

  return out;
}
|
||||
|
||||
/**
 * Prints a braced block expression: its statements, then the optional
 * trailing result expression, one per line and one indent level deeper
 * than the enclosing context. Empty blocks collapse to `{}`.
 */
visitBlockExpression(node: CSTNode, ctx: PrintContext): string {
  const blockStatements = getChildNodes(node, "blockStatement");
  const expressions = getChildNodes(node, "expression");

  if (blockStatements.length === 0 && expressions.length === 0) {
    return "{}";
  }

  // Block contents render at one extra indent level.
  const nestedCtx = {
    ...ctx,
    indentLevel: ctx.indentLevel + 1,
  };

  const lines: string[] = blockStatements.map((stmt: CSTNode) =>
    this.visitor.visit(stmt, nestedCtx),
  );
  if (expressions.length > 0) {
    lines.push(this.visitor.visit(expressions[0], nestedCtx));
  }

  const indent = createIndent(1, ctx);
  const body = lines.map((line) => indent + line).join("\n");
  return "{\n" + body + "\n}";
}
|
||||
|
||||
/**
 * Prints a partial function literal `{ case ... }`. A single case
 * shorter than 50 characters stays on one line; anything else is
 * rendered multi-line with a fixed two-space indent per case.
 */
visitPartialFunctionLiteral(node: CSTNode, ctx: PrintContext): string {
  const caseClauses = getChildNodes(node, "caseClause");
  if (caseClauses.length === 0) {
    return "{}";
  }

  // Compact single-line form for one short case.
  if (caseClauses.length === 1) {
    const single = this.visitor.visit(caseClauses[0], ctx);
    if (single.length < 50) {
      return `{ ${single} }`;
    }
  }

  const body = caseClauses
    .map((clause: CSTNode) => "  " + this.visitor.visit(clause, ctx))
    .join("\n");
  return "{\n" + body + "\n}";
}
|
||||
|
||||
/**
 * Prints `name <op> expression` for an assignment statement, where
 * <op> is one of `=`, `+=`, `-=`, `*=`, `/=`, `%=`, or the sbt `:=`
 * operator. Returns "" when no identifier is present.
 */
visitAssignmentStatement(node: CSTNode, ctx: PrintContext): string {
  const identifiers = getChildNodes(node, "Identifier");
  if (identifiers.length === 0) {
    return "";
  }

  let result = getNodeImage(identifiers[0]);

  // The first assignment-operator token found wins, checked in the
  // same precedence order as before.
  const operatorKinds = [
    "Equals",
    "PlusEquals",
    "MinusEquals",
    "StarEquals",
    "SlashEquals",
    "PercentEquals",
    "SbtAssign",
  ];
  let operator: CSTNode | undefined;
  for (const kind of operatorKinds) {
    const tokens = getChildNodes(node, kind);
    if (tokens.length > 0 && tokens[0]) {
      operator = tokens[0];
      break;
    }
  }

  if (operator) {
    result += " " + getNodeImage(operator) + " ";
    const expressions = getChildNodes(node, "expression");
    if (expressions.length > 0) {
      result += this.visitor.visit(expressions[0], ctx);
    }
  }

  return result;
}
|
||||
}
|
||||
@@ -1,433 +0,0 @@
|
||||
/**
|
||||
* Scala 3 specific visitor methods for modern language features
|
||||
*/
|
||||
import { getChildNodes, getFirstChild, getNodeImage } from "./utils";
|
||||
import type { PrintContext, CSTNode } from "./utils";
|
||||
|
||||
/**
 * Contract the Scala 3 visitor methods delegate back to: the main
 * dispatch entry point, the indentation helper, and modifier rendering.
 */
export interface Scala3Visitor {
  // Dispatches a child node to the appropriate visit method.
  visit(node: CSTNode, ctx: PrintContext): string;
  // Returns the indentation string for the current context level.
  getIndentation(ctx: PrintContext): string;
  // Renders a sequence of modifier nodes as a single string.
  visitModifiers(modifiers: CSTNode[], ctx: PrintContext): string;
}
|
||||
|
||||
export class Scala3VisitorMethods {
|
||||
private visitor: Scala3Visitor;
|
||||
|
||||
constructor(visitor: Scala3Visitor) {
|
||||
this.visitor = visitor;
|
||||
}
|
||||
|
||||
// Quote and splice expressions for macros
|
||||
visitQuoteExpression(node: CSTNode, ctx: PrintContext): string {
|
||||
const expression = getFirstChild(node, "expression");
|
||||
return (
|
||||
"'{ " + (expression ? this.visitor.visit(expression, ctx) : "") + " }"
|
||||
);
|
||||
}
|
||||
|
||||
visitSpliceExpression(node: CSTNode, ctx: PrintContext): string {
|
||||
const expression = getFirstChild(node, "expression");
|
||||
return (
|
||||
"${ " + (expression ? this.visitor.visit(expression, ctx) : "") + " }"
|
||||
);
|
||||
}
|
||||
|
||||
// Polymorphic function literals
|
||||
visitPolymorphicFunctionLiteral(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "[";
|
||||
|
||||
const polymorphicTypeParams = getChildNodes(
|
||||
node,
|
||||
"polymorphicTypeParameter",
|
||||
);
|
||||
if (polymorphicTypeParams.length > 0) {
|
||||
const parameters = polymorphicTypeParams.map((param: CSTNode) =>
|
||||
this.visitor.visit(param, ctx),
|
||||
);
|
||||
result += parameters.join(", ");
|
||||
}
|
||||
|
||||
result += "] => ";
|
||||
const expression = getFirstChild(node, "expression");
|
||||
result += expression ? this.visitor.visit(expression, ctx) : "";
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Polymorphic function types
|
||||
visitPolymorphicFunctionType(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "[";
|
||||
|
||||
const polymorphicTypeParams = getChildNodes(
|
||||
node,
|
||||
"polymorphicTypeParameter",
|
||||
);
|
||||
if (polymorphicTypeParams.length > 0) {
|
||||
const parameters = polymorphicTypeParams.map((param: CSTNode) =>
|
||||
this.visitor.visit(param, ctx),
|
||||
);
|
||||
result += parameters.join(", ");
|
||||
}
|
||||
|
||||
result += "] => ";
|
||||
const typeNode = getFirstChild(node, "type");
|
||||
if (typeNode) {
|
||||
result += this.visitor.visit(typeNode, ctx);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
visitPolymorphicTypeParameter(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "";
|
||||
|
||||
// Add variance annotation if present
|
||||
const plusTokens = getChildNodes(node, "Plus");
|
||||
const minusTokens = getChildNodes(node, "Minus");
|
||||
if (plusTokens.length > 0) {
|
||||
result += "+";
|
||||
} else if (minusTokens.length > 0) {
|
||||
result += "-";
|
||||
}
|
||||
|
||||
const identifiers = getChildNodes(node, "Identifier");
|
||||
if (identifiers.length > 0) {
|
||||
result += getNodeImage(identifiers[0]);
|
||||
}
|
||||
|
||||
// Handle type bounds
|
||||
const subtypeOf = getChildNodes(node, "SubtypeOf");
|
||||
const supertypeOf = getChildNodes(node, "SupertypeOf");
|
||||
const typeNode = getFirstChild(node, "type");
|
||||
|
||||
if (subtypeOf.length > 0 && typeNode) {
|
||||
result += " <: " + this.visitor.visit(typeNode, ctx);
|
||||
}
|
||||
if (supertypeOf.length > 0 && typeNode) {
|
||||
result += " >: " + this.visitor.visit(typeNode, ctx);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Enum definitions
|
||||
visitEnumDefinition(node: CSTNode, ctx: PrintContext): string {
|
||||
const identifiers = getChildNodes(node, "Identifier");
|
||||
let result =
|
||||
"enum " + (identifiers.length > 0 ? getNodeImage(identifiers[0]) : "");
|
||||
|
||||
const typeParameters = getFirstChild(node, "typeParameters");
|
||||
if (typeParameters) {
|
||||
result += this.visitor.visit(typeParameters, ctx);
|
||||
}
|
||||
|
||||
const classParameters = getFirstChild(node, "classParameters");
|
||||
if (classParameters) {
|
||||
result += this.visitor.visit(classParameters, ctx);
|
||||
}
|
||||
|
||||
const extendsClause = getFirstChild(node, "extendsClause");
|
||||
if (extendsClause) {
|
||||
result += " " + this.visitor.visit(extendsClause, ctx);
|
||||
}
|
||||
|
||||
result += " {\n";
|
||||
|
||||
const enumCases = getChildNodes(node, "enumCase");
|
||||
if (enumCases.length > 0) {
|
||||
const indent = this.visitor.getIndentation(ctx);
|
||||
const cases = enumCases.map(
|
||||
(c: CSTNode) => indent + this.visitor.visit(c, ctx),
|
||||
);
|
||||
result += cases.join("\n");
|
||||
}
|
||||
|
||||
result += "\n}";
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
visitEnumCase(node: CSTNode, ctx: PrintContext): string {
|
||||
const identifiers = getChildNodes(node, "Identifier");
|
||||
let result =
|
||||
"case " + (identifiers.length > 0 ? getNodeImage(identifiers[0]) : "");
|
||||
|
||||
const classParameters = getFirstChild(node, "classParameters");
|
||||
if (classParameters) {
|
||||
result += this.visitor.visit(classParameters, ctx);
|
||||
}
|
||||
|
||||
const extendsClause = getFirstChild(node, "extendsClause");
|
||||
if (extendsClause) {
|
||||
result += " " + this.visitor.visit(extendsClause, ctx);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Extension methods
|
||||
visitExtensionDefinition(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "extension";
|
||||
|
||||
const typeParameters = getFirstChild(node, "typeParameters");
|
||||
if (typeParameters) {
|
||||
result += this.visitor.visit(typeParameters, ctx);
|
||||
}
|
||||
|
||||
const identifiers = getChildNodes(node, "Identifier");
|
||||
const typeNode = getFirstChild(node, "type");
|
||||
result +=
|
||||
" (" +
|
||||
(identifiers.length > 0 ? getNodeImage(identifiers[0]) : "") +
|
||||
": ";
|
||||
if (typeNode) {
|
||||
result += this.visitor.visit(typeNode, ctx);
|
||||
}
|
||||
result += ") {\n";
|
||||
|
||||
const extensionMembers = getChildNodes(node, "extensionMember");
|
||||
if (extensionMembers.length > 0) {
|
||||
const members = extensionMembers.map(
|
||||
(m: CSTNode) => " " + this.visitor.visit(m, ctx),
|
||||
);
|
||||
result += members.join("\n");
|
||||
}
|
||||
|
||||
result += "\n}";
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
visitExtensionMember(node: CSTNode, ctx: PrintContext): string {
|
||||
const modifierNodes = getChildNodes(node, "modifier");
|
||||
const modifiers = this.visitor.visitModifiers(modifierNodes, ctx);
|
||||
const defDefinition = getFirstChild(node, "defDefinition");
|
||||
const definition = defDefinition
|
||||
? this.visitor.visit(defDefinition, ctx)
|
||||
: "";
|
||||
|
||||
return modifiers ? modifiers + " " + definition : definition;
|
||||
}
|
||||
|
||||
// Given definitions
|
||||
visitGivenDefinition(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "given";
|
||||
|
||||
const identifiers = getChildNodes(node, "Identifier");
|
||||
if (identifiers.length > 0) {
|
||||
// Named given with potential parameters: given name[T](using ord: Type): Type
|
||||
result += " " + getNodeImage(identifiers[0]);
|
||||
|
||||
const typeParameters = getFirstChild(node, "typeParameters");
|
||||
if (typeParameters) {
|
||||
result += this.visitor.visit(typeParameters, ctx);
|
||||
}
|
||||
|
||||
const parameterLists = getFirstChild(node, "parameterLists");
|
||||
if (parameterLists) {
|
||||
result += this.visitor.visit(parameterLists, ctx);
|
||||
}
|
||||
|
||||
const typeNode = getFirstChild(node, "type");
|
||||
if (typeNode) {
|
||||
result += ": " + this.visitor.visit(typeNode, ctx);
|
||||
}
|
||||
} else {
|
||||
// Anonymous given: given Type = expression
|
||||
const typeNode = getFirstChild(node, "type");
|
||||
if (typeNode) {
|
||||
result += " " + this.visitor.visit(typeNode, ctx);
|
||||
}
|
||||
}
|
||||
|
||||
const equalsTokens = getChildNodes(node, "Equals");
|
||||
if (equalsTokens.length > 0) {
|
||||
const expression = getFirstChild(node, "expression");
|
||||
if (expression) {
|
||||
result += " = " + this.visitor.visit(expression, ctx);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Type definitions including opaque types
|
||||
visitTypeDefinition(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "";
|
||||
|
||||
// Handle opaque types
|
||||
const opaqueTokens = getChildNodes(node, "Opaque");
|
||||
if (opaqueTokens.length > 0) {
|
||||
result += "opaque ";
|
||||
}
|
||||
|
||||
const identifiers = getChildNodes(node, "Identifier");
|
||||
result +=
|
||||
"type " + (identifiers.length > 0 ? getNodeImage(identifiers[0]) : "");
|
||||
|
||||
const typeParameters = getFirstChild(node, "typeParameters");
|
||||
if (typeParameters) {
|
||||
result += this.visitor.visit(typeParameters, ctx);
|
||||
}
|
||||
|
||||
const typeNode = getFirstChild(node, "type");
|
||||
if (typeNode) {
|
||||
result += " = " + this.visitor.visit(typeNode, ctx);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Export clauses and expressions
|
||||
visitExportClause(node: CSTNode, ctx: PrintContext): string {
|
||||
const exportExpression = getFirstChild(node, "exportExpression");
|
||||
return (
|
||||
"export " +
|
||||
(exportExpression ? this.visitor.visit(exportExpression, ctx) : "")
|
||||
);
|
||||
}
|
||||
|
||||
visitExportExpression(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "";
|
||||
|
||||
// Build the export path
|
||||
const identifiers = getChildNodes(node, "Identifier");
|
||||
const dots = getChildNodes(node, "Dot");
|
||||
const underscores = getChildNodes(node, "Underscore");
|
||||
const givens = getChildNodes(node, "Given");
|
||||
const leftBraces = getChildNodes(node, "LeftBrace");
|
||||
|
||||
// Add first identifier
|
||||
if (identifiers.length > 0) {
|
||||
result = getNodeImage(identifiers[0]);
|
||||
}
|
||||
|
||||
// Process remaining parts
|
||||
let identifierIndex = 1;
|
||||
for (let i = 0; i < dots.length; i++) {
|
||||
result += ".";
|
||||
|
||||
// Check what follows this dot
|
||||
if (underscores.length > 0 && i === dots.length - 1) {
|
||||
// Wildcard export
|
||||
result += "_";
|
||||
} else if (givens.length > 0 && i === dots.length - 1) {
|
||||
// Given export
|
||||
result += "given";
|
||||
} else if (leftBraces.length > 0 && i === dots.length - 1) {
|
||||
// Multiple export selectors
|
||||
result += "{";
|
||||
const exportSelectors = getChildNodes(node, "exportSelector");
|
||||
if (exportSelectors.length > 0) {
|
||||
const selectors = exportSelectors.map((sel: CSTNode) =>
|
||||
this.visitor.visit(sel, ctx),
|
||||
);
|
||||
result += selectors.join(", ");
|
||||
}
|
||||
result += "}";
|
||||
} else if (identifierIndex < identifiers.length) {
|
||||
// Next identifier in path
|
||||
result += getNodeImage(identifiers[identifierIndex]);
|
||||
identifierIndex++;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
visitExportSelector(node: CSTNode, _ctx: PrintContext): string {
|
||||
const underscores = getChildNodes(node, "Underscore");
|
||||
const identifiers = getChildNodes(node, "Identifier");
|
||||
const givens = getChildNodes(node, "Given");
|
||||
const arrows = getChildNodes(node, "Arrow");
|
||||
|
||||
// Handle wildcard export
|
||||
if (underscores.length > 0 && identifiers.length === 0) {
|
||||
return "_";
|
||||
}
|
||||
|
||||
// Handle given export
|
||||
if (givens.length > 0 && identifiers.length === 0) {
|
||||
return "given";
|
||||
}
|
||||
|
||||
let result = "";
|
||||
|
||||
// Handle regular identifiers
|
||||
if (identifiers.length > 0) {
|
||||
result = getNodeImage(identifiers[0]);
|
||||
}
|
||||
|
||||
// Handle given with specific identifiers: given SpecificType
|
||||
if (givens.length > 0 && identifiers.length > 0) {
|
||||
result = "given " + getNodeImage(identifiers[0]);
|
||||
}
|
||||
|
||||
if (arrows.length > 0) {
|
||||
result += " => ";
|
||||
if (underscores.length > 0) {
|
||||
result += "_";
|
||||
} else if (identifiers.length > 1) {
|
||||
result += getNodeImage(identifiers[1]);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Context function types
|
||||
visitContextFunctionType(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "";
|
||||
|
||||
// Handle parenthesized types
|
||||
const leftParens = getChildNodes(node, "LeftParen");
|
||||
if (leftParens.length > 0) {
|
||||
const tupleType = getFirstChild(node, "tupleTypeOrParenthesized");
|
||||
if (tupleType) {
|
||||
result += "(" + this.visitor.visit(tupleType, ctx) + ")";
|
||||
}
|
||||
} else {
|
||||
// Handle simple types
|
||||
const simpleType = getFirstChild(node, "simpleType");
|
||||
if (simpleType) {
|
||||
result += this.visitor.visit(simpleType, ctx);
|
||||
}
|
||||
}
|
||||
|
||||
const typeNode = getFirstChild(node, "type");
|
||||
if (typeNode) {
|
||||
result += " ?=> " + this.visitor.visit(typeNode, ctx);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
// Inline and transparent modifiers
|
||||
visitInlineModifier(): string {
|
||||
return "inline";
|
||||
}
|
||||
|
||||
visitTransparentModifier(): string {
|
||||
return "transparent";
|
||||
}
|
||||
|
||||
// Using clauses
|
||||
visitUsingClause(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "using ";
|
||||
|
||||
const identifiers = getChildNodes(node, "Identifier");
|
||||
if (identifiers.length > 0) {
|
||||
result += getNodeImage(identifiers[0]);
|
||||
}
|
||||
|
||||
const colonTokens = getChildNodes(node, "Colon");
|
||||
if (colonTokens.length > 0) {
|
||||
const typeNode = getFirstChild(node, "type");
|
||||
if (typeNode) {
|
||||
result += ": " + this.visitor.visit(typeNode, ctx);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
@@ -1,658 +0,0 @@
|
||||
/**
|
||||
* Statement visitor methods for import/export, package, and other statements
|
||||
*/
|
||||
import { getChildNodes, getFirstChild, getNodeImage } from "./utils";
|
||||
import type { PrintContext, CSTNode } from "./utils";
|
||||
|
||||
/**
 * Minimal visitor contract the statement methods delegate back to:
 * the main dispatch entry point.
 */
export interface StatementVisitor {
  // Dispatches a child node to the appropriate visit method.
  visit(node: CSTNode, ctx: PrintContext): string;
}
|
||||
|
||||
export class StatementVisitorMethods {
|
||||
// Main visitor used to render child nodes.
private visitor: StatementVisitor;

constructor(visitor: StatementVisitor) {
  this.visitor = visitor;
}
|
||||
|
||||
/** Prints a package clause: `package a.b.c`. */
visitPackageClause(node: CSTNode, ctx: PrintContext): string {
  const qualifiedIdentifier = getFirstChild(node, "qualifiedIdentifier");
  const path = qualifiedIdentifier
    ? this.visitor.visit(qualifiedIdentifier, ctx)
    : "";
  return `package ${path}`;
}
|
||||
|
||||
/** Prints an import clause: `import path...`. */
visitImportClause(node: CSTNode, ctx: PrintContext): string {
  const importExpression = getFirstChild(node, "importExpression");
  const path = importExpression
    ? this.visitor.visit(importExpression, ctx)
    : "";
  return `import ${path}`;
}
|
||||
|
||||
/**
 * Prints an import path such as `a.b.c`, `a.b._`, or `a.b.{x, y}`.
 * Wildcard and brace selectors attach only to the final dot of the
 * path; other dots are followed by the next identifier in order.
 */
visitImportExpression(node: CSTNode, ctx: PrintContext): string {
  let result = "";

  const identifiers = getChildNodes(node, "Identifier");
  const dots = getChildNodes(node, "Dot");
  // Hoisted out of the loop below: these child lookups are
  // loop-invariant (the original re-queried them every iteration).
  const underscores = getChildNodes(node, "Underscore");
  const leftBraces = getChildNodes(node, "LeftBrace");

  // First identifier starts the path.
  if (identifiers.length > 0) {
    result = getNodeImage(identifiers[0]);
  }

  let identifierIndex = 1;
  for (let i = 0; i < dots.length; i++) {
    result += ".";

    const isLastDot = i === dots.length - 1;
    if (underscores.length > 0 && isLastDot) {
      // Wildcard import.
      result += "_";
    } else if (leftBraces.length > 0 && isLastDot) {
      // Multiple import selectors: `.{a, b => c}`.
      result += "{";
      const importSelectors = getChildNodes(node, "importSelector");
      if (importSelectors.length > 0) {
        result += importSelectors
          .map((sel: CSTNode) => this.visitor.visit(sel, ctx))
          .join(", ");
      }
      result += "}";
    } else if (identifierIndex < identifiers.length) {
      // Next identifier in the path.
      result += getNodeImage(identifiers[identifierIndex]);
      identifierIndex++;
    }
  }

  return result;
}
|
||||
|
||||
/**
 * Prints one selector inside `{...}`: `_`, `name`, or a rename
 * `name => alias` / `name => _`.
 */
// eslint-disable-next-line @typescript-eslint/no-unused-vars
visitImportSelector(node: CSTNode, _ctx: PrintContext): string {
  const underscores = getChildNodes(node, "Underscore");
  const identifiers = getChildNodes(node, "Identifier");

  // Bare wildcard import.
  if (underscores.length > 0 && identifiers.length === 0) {
    return "_";
  }

  let result = "";
  if (identifiers.length > 0) {
    result = getNodeImage(identifiers[0]);
  }

  const arrows = getChildNodes(node, "Arrow");
  if (arrows.length > 0) {
    result += " => ";
    // Reuse `underscores`: the original issued a second, identical
    // getChildNodes(node, "Underscore") lookup here.
    if (underscores.length > 0) {
      result += "_";
    } else if (identifiers.length > 1) {
      result += getNodeImage(identifiers[1]);
    }
  }

  return result;
}
|
||||
|
||||
/** Prints an export clause: `export path...`. */
visitExportClause(node: CSTNode, ctx: PrintContext): string {
  const exportExpression = getFirstChild(node, "exportExpression");
  const path = exportExpression
    ? this.visitor.visit(exportExpression, ctx)
    : "";
  return `export ${path}`;
}
|
||||
|
||||
/**
 * Prints an export path such as `a.b._`, `a.b.given`, or `a.b.{x, y}`.
 * Wildcard / given / brace selectors attach only to the final dot of
 * the path; other dots are followed by the next identifier in order.
 */
visitExportExpression(node: CSTNode, ctx: PrintContext): string {
  let result = "";

  const identifiers = getChildNodes(node, "Identifier");
  const dots = getChildNodes(node, "Dot");
  // Hoisted out of the loop below: these child lookups are
  // loop-invariant (the original re-queried them every iteration,
  // including an inline "LeftBrace" query in a branch condition).
  const underscores = getChildNodes(node, "Underscore");
  const givens = getChildNodes(node, "Given");
  const leftBraces = getChildNodes(node, "LeftBrace");

  // First identifier starts the path.
  if (identifiers.length > 0) {
    result = getNodeImage(identifiers[0]);
  }

  let identifierIndex = 1;
  for (let i = 0; i < dots.length; i++) {
    result += ".";

    const isLastDot = i === dots.length - 1;
    if (underscores.length > 0 && isLastDot) {
      // Wildcard export.
      result += "_";
    } else if (givens.length > 0 && isLastDot) {
      // Given export.
      result += "given";
    } else if (leftBraces.length > 0 && isLastDot) {
      // Multiple export selectors: `.{a, b => c}`.
      result += "{";
      const exportSelectors = getChildNodes(node, "exportSelector");
      if (exportSelectors.length > 0) {
        result += exportSelectors
          .map((sel: CSTNode) => this.visitor.visit(sel, ctx))
          .join(", ");
      }
      result += "}";
    } else if (identifierIndex < identifiers.length) {
      // Next identifier in the path.
      result += getNodeImage(identifiers[identifierIndex]);
      identifierIndex++;
    }
  }

  return result;
}
|
||||
|
||||
/**
 * Prints one export selector inside `{...}`: `_`, `given`,
 * `given Type`, `name`, or a rename `name => alias` / `name => _`.
 */
visitExportSelector(node: CSTNode): string {
  const underscores = getChildNodes(node, "Underscore");
  const identifiers = getChildNodes(node, "Identifier");
  const givens = getChildNodes(node, "Given");

  // Bare wildcard export.
  if (underscores.length > 0 && identifiers.length === 0) {
    return "_";
  }

  // Bare given export.
  if (givens.length > 0 && identifiers.length === 0) {
    return "given";
  }

  let result = "";
  if (identifiers.length > 0) {
    result = getNodeImage(identifiers[0]);
  }

  // Given export of a specific type: `given SpecificType`.
  if (givens.length > 0 && identifiers.length > 0) {
    result = "given " + getNodeImage(identifiers[0]);
  }

  const arrows = getChildNodes(node, "Arrow");
  if (arrows.length > 0) {
    result += " => ";
    // Reuse `underscores`: the original issued a second, identical
    // getChildNodes(node, "Underscore") lookup here.
    if (underscores.length > 0) {
      result += "_";
    } else if (identifiers.length > 1) {
      result += getNodeImage(identifiers[1]);
    }
  }

  return result;
}
|
||||
|
||||
/**
 * Prints a top-level definition: optional modifiers (including `case`)
 * followed by whichever child definition/statement/expression kind is
 * present. The first matching child kind wins, in the same precedence
 * order as the original chain of if-blocks this replaces.
 */
visitTopLevelDefinition(node: CSTNode, ctx: PrintContext): string {
  let result = "";

  // Leading modifiers prefix whatever follows.
  const modifiers = getChildNodes(node, "modifier");
  if (modifiers.length > 0) {
    const modifierStr = this.visitModifiers(modifiers, ctx);
    result += modifierStr + " ";
  }

  // Child kinds in precedence order (collapses twelve identical
  // getFirstChild/if/return stanzas into one data-driven loop).
  const childKinds = [
    "definition",
    "classDefinition",
    "objectDefinition",
    "traitDefinition",
    "valDefinition",
    "varDefinition",
    "defDefinition",
    "enumDefinition", // Scala 3
    "extensionDefinition", // Scala 3
    "givenDefinition", // Scala 3
    "typeDefinition", // including opaque types
    "assignmentStatement",
    "expression",
  ];
  for (const kind of childKinds) {
    const child = getFirstChild(node, kind);
    if (child) {
      return result + this.visitor.visit(child, ctx);
    }
  }

  return result;
}
|
||||
|
||||
/**
 * Prints a statement inside a block by delegating to whichever child
 * kind is present, in precedence order. Returns "" when no known child
 * exists.
 */
visitBlockStatement(node: CSTNode, ctx: PrintContext): string {
  // Same precedence order as the original chain of if-blocks.
  const childKinds = [
    "valDefinition",
    "varDefinition",
    "defDefinition",
    "assignmentStatement",
    "expression",
  ];
  for (const kind of childKinds) {
    const child = getFirstChild(node, kind);
    if (child) {
      return this.visitor.visit(child, ctx);
    }
  }

  return "";
}
|
||||
|
||||
/**
 * Prints a whole compilation unit: package clause, imports, exports,
 * then top-level definitions/statements/expressions, with blank-line
 * separators between sections and a single trailing newline.
 */
visitCompilationUnit(node: CSTNode, ctx: PrintContext): string {
  const parts: string[] = [];

  // Gather each child list exactly once (the original re-queried the
  // top-level lists twice: once for the exports blank-line check and
  // again when emitting them).
  const importClauses = getChildNodes(node, "importClause");
  const exportClauses = getChildNodes(node, "exportClause");
  const topLevelDefinitions = getChildNodes(node, "topLevelDefinition");
  const topLevelStatements = getChildNodes(node, "topLevelStatement");
  const expressions = getChildNodes(node, "expression");

  const packageClause = getFirstChild(node, "packageClause");
  if (packageClause) {
    parts.push(this.visitor.visit(packageClause, ctx));
  }

  // Blank line after the package clause.
  if (parts.length > 0) {
    parts.push("");
  }

  importClauses.forEach((importNode: CSTNode) => {
    parts.push(this.visitor.visit(importNode, ctx));
  });

  // Blank line after imports.
  if (importClauses.length > 0) {
    parts.push("");
  }

  exportClauses.forEach((exportNode: CSTNode) => {
    parts.push(this.visitor.visit(exportNode, ctx));
  });

  // Blank line after exports only when something follows them.
  if (exportClauses.length > 0) {
    const hasSubsequentElements =
      topLevelDefinitions.length > 0 ||
      topLevelStatements.length > 0 ||
      expressions.length > 0;
    if (hasSubsequentElements) {
      parts.push("");
    }
  }

  topLevelDefinitions.forEach((def: CSTNode) => {
    parts.push(this.visitor.visit(def, ctx));
  });

  topLevelStatements.forEach((stmt: CSTNode) => {
    parts.push(this.visitor.visit(stmt, ctx));
  });

  expressions.forEach((expr: CSTNode) => {
    parts.push(this.visitor.visit(expr, ctx));
  });

  // Join parts and ensure proper file formatting.
  if (parts.length === 0) return "";
  if (parts.length === 1) return parts[0] + "\n";
  return parts.join("\n") + "\n";
}
|
||||
|
||||
/** Renders a list of annotations separated by single spaces. */
visitAnnotations(annotations: CSTNode[], ctx: PrintContext): string {
  const rendered: string[] = [];
  for (const annotation of annotations) {
    rendered.push(this.visitor.visit(annotation, ctx));
  }
  return rendered.join(" ");
}
|
||||
|
||||
/**
 * Prints an annotation: `@Name(args)` with support for multiple
 * parameter lists such as `@Inject()(val x: Type)`. Arguments are
 * distributed evenly across the parameter lists — a simplification
 * kept from the original; ideally this would follow the actual
 * parse structure.
 */
visitAnnotation(node: CSTNode, ctx: PrintContext): string {
  const qualifiedIdentifier = getFirstChild(node, "qualifiedIdentifier");
  let result =
    "@" +
    (qualifiedIdentifier ? this.visitor.visit(qualifiedIdentifier, ctx) : "");

  const leftParens = getChildNodes(node, "LeftParen");
  if (leftParens.length === 0) {
    return result;
  }

  const annotationArguments = getChildNodes(node, "annotationArgument");
  // Even split of the argument nodes across the parameter lists.
  const argsPerList = Math.ceil(
    annotationArguments.length / leftParens.length,
  );

  let argIndex = 0;
  for (let listIndex = 0; listIndex < leftParens.length; listIndex++) {
    const endIndex = Math.min(
      argIndex + argsPerList,
      annotationArguments.length,
    );
    const argsInThisList = annotationArguments.slice(argIndex, endIndex);
    argIndex = endIndex;

    const rendered = argsInThisList.map((arg: CSTNode) =>
      this.visitor.visit(arg, ctx),
    );
    result += "(" + rendered.join(", ") + ")";
  }

  return result;
}
|
||||
|
||||
/**
 * Render a single annotation argument. Three shapes are recognized, tried
 * in order: a parameter declaration (`val x: Type [= default]`), a named
 * argument (`name = value`), and a positional expression.
 */
visitAnnotationArgument(node: CSTNode, ctx: PrintContext): string {
  const valTokens = getChildNodes(node, "Val");
  const varTokens = getChildNodes(node, "Var");
  const identifiers = getChildNodes(node, "Identifier");
  const colons = getChildNodes(node, "Colon");
  const equals = getChildNodes(node, "Equals");
  const expressions = getChildNodes(node, "expression");
  const types = getChildNodes(node, "type");

  const isParameterDeclaration =
    (valTokens.length > 0 || varTokens.length > 0) &&
    identifiers.length > 0 &&
    colons.length > 0 &&
    types.length > 0;

  // Parameter declaration: val x: Type or var y: Type, optional default.
  if (isParameterDeclaration) {
    const keyword = valTokens.length > 0 ? "val " : "var ";
    const pieces = [
      keyword,
      getNodeImage(identifiers[0]),
      ": ",
      this.visitor.visit(types[0], ctx),
    ];
    if (equals.length > 0 && expressions.length > 0) {
      pieces.push(" = ", this.visitor.visit(expressions[0], ctx));
    }
    return pieces.join("");
  }

  // Named argument: name = value
  if (identifiers.length > 0 && equals.length > 0 && expressions.length > 0) {
    return `${getNodeImage(identifiers[0])} = ${this.visitor.visit(expressions[0], ctx)}`;
  }

  // Positional argument
  if (expressions.length > 0) {
    return this.visitor.visit(expressions[0], ctx);
  }

  return "";
}
|
||||
|
||||
/** Render modifiers separated by single spaces. */
visitModifiers(modifiers: CSTNode[], ctx: PrintContext): string {
  const parts: string[] = [];
  modifiers.forEach((modifier) => parts.push(this.visitor.visit(modifier, ctx)));
  return parts.join(" ");
}
|
||||
|
||||
/**
 * Render a definition: optional annotations and modifiers followed by the
 * first matching definition-kind child found on the node.
 *
 * The previous implementation was an eleven-level nested else-if pyramid;
 * this ordered lookup preserves the exact same precedence (first match
 * wins) while staying readable.
 */
visitDefinition(node: CSTNode, ctx: PrintContext): string {
  let result = "";

  // Annotations come first, separated from the definition by a space.
  const annotations = getChildNodes(node, "annotation");
  if (annotations.length > 0) {
    result += this.visitAnnotations(annotations, ctx) + " ";
  }

  // Then modifiers.
  const modifiers = getChildNodes(node, "modifier");
  if (modifiers.length > 0) {
    result += this.visitModifiers(modifiers, ctx) + " ";
  }

  // Ordered exactly as the original else-if chain; only the first present
  // kind is rendered.
  const definitionKinds = [
    "classDefinition",
    "objectDefinition",
    "traitDefinition",
    "enumDefinition",
    "extensionDefinition",
    "valDefinition",
    "varDefinition",
    "defDefinition",
    "givenDefinition",
    "typeDefinition",
    "assignmentStatement",
  ];
  for (const kind of definitionKinds) {
    const child = getFirstChild(node, kind);
    if (child) {
      result += this.visitor.visit(child, ctx);
      break;
    }
  }

  return result;
}
|
||||
|
||||
/**
 * Render a pattern: identifier, wildcard `_`, literal, tuple/parenthesized
 * pattern, or a nested-pattern fallback. Returns "" when nothing matches.
 */
visitPattern(node: CSTNode, ctx: PrintContext): string {
  const identifiers = getChildNodes(node, "Identifier");
  if (identifiers.length > 0) {
    return getNodeImage(identifiers[0]);
  }

  if (getChildNodes(node, "Underscore").length > 0) {
    return "_";
  }

  const literal = getFirstChild(node, "literal");
  if (literal) {
    return this.visitor.visit(literal, ctx);
  }

  const patterns = getChildNodes(node, "pattern");
  const hasParens = getChildNodes(node, "LeftParen").length > 0;

  if (hasParens && patterns.length > 0) {
    // Tuple pattern (a, b, ...) or a single parenthesized pattern.
    const rendered = patterns.map((p: CSTNode) => this.visitor.visit(p, ctx));
    return "(" + rendered.join(", ") + ")";
  }

  if (patterns.length > 0) {
    // Constructor pattern or other complex patterns: delegate to the first.
    return this.visitor.visit(patterns[0], ctx);
  }

  return "";
}
|
||||
}
|
||||
@@ -1,474 +0,0 @@
|
||||
/**
|
||||
* Type-related visitor methods for handling type expressions, type parameters, and type systems
|
||||
*/
|
||||
import { getChildNodes, getFirstChild, getNodeImage } from "./utils";
|
||||
import type { PrintContext, CSTNode } from "./utils";
|
||||
|
||||
export interface TypeVisitor {
|
||||
visit(node: CSTNode, ctx: PrintContext): string;
|
||||
}
|
||||
|
||||
export class TypeVisitorMethods {
|
||||
private visitor: TypeVisitor;
|
||||
|
||||
constructor(visitor: TypeVisitor) {
|
||||
this.visitor = visitor;
|
||||
}
|
||||
|
||||
visitType(node: CSTNode, ctx: PrintContext): string {
|
||||
const matchType = getFirstChild(node, "matchType");
|
||||
return matchType ? this.visitor.visit(matchType, ctx) : "";
|
||||
}
|
||||
|
||||
visitMatchType(node: CSTNode, ctx: PrintContext): string {
|
||||
const unionType = getFirstChild(node, "unionType");
|
||||
let result = unionType ? this.visitor.visit(unionType, ctx) : "";
|
||||
|
||||
const matchTokens = getChildNodes(node, "Match");
|
||||
if (matchTokens.length > 0) {
|
||||
result += " match {";
|
||||
const matchTypeCases = getChildNodes(node, "matchTypeCase");
|
||||
if (matchTypeCases.length > 0) {
|
||||
for (const caseNode of matchTypeCases) {
|
||||
result += "\n " + this.visitor.visit(caseNode, ctx);
|
||||
}
|
||||
result += "\n";
|
||||
}
|
||||
result += "}";
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
visitMatchTypeCase(node: CSTNode, ctx: PrintContext): string {
|
||||
const types = getChildNodes(node, "type");
|
||||
if (types.length >= 2) {
|
||||
const leftType = this.visitor.visit(types[0], ctx);
|
||||
const rightType = this.visitor.visit(types[1], ctx);
|
||||
return `case ${leftType} => ${rightType}`;
|
||||
}
|
||||
return "";
|
||||
}
|
||||
|
||||
visitUnionType(node: CSTNode, ctx: PrintContext): string {
|
||||
const types = getChildNodes(node, "intersectionType");
|
||||
if (types.length === 1) {
|
||||
return this.visitor.visit(types[0], ctx);
|
||||
}
|
||||
|
||||
const typeStrings = types.map((t: CSTNode) => this.visitor.visit(t, ctx));
|
||||
return typeStrings.join(" | ");
|
||||
}
|
||||
|
||||
visitIntersectionType(node: CSTNode, ctx: PrintContext): string {
|
||||
const types = getChildNodes(node, "baseType");
|
||||
if (types.length === 1) {
|
||||
return this.visitor.visit(types[0], ctx);
|
||||
}
|
||||
|
||||
const typeStrings = types.map((t: CSTNode) => this.visitor.visit(t, ctx));
|
||||
return typeStrings.join(" & ");
|
||||
}
|
||||
|
||||
visitContextFunctionType(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "";
|
||||
|
||||
// Handle parenthesized types
|
||||
const leftParen = getChildNodes(node, "LeftParen");
|
||||
if (leftParen.length > 0) {
|
||||
const tupleType = getFirstChild(node, "tupleTypeOrParenthesized");
|
||||
if (tupleType) {
|
||||
result += "(" + this.visitor.visit(tupleType, ctx) + ")";
|
||||
}
|
||||
} else {
|
||||
// Handle simple types
|
||||
const simpleType = getFirstChild(node, "simpleType");
|
||||
if (simpleType) {
|
||||
result += this.visitor.visit(simpleType, ctx);
|
||||
}
|
||||
}
|
||||
|
||||
const type = getFirstChild(node, "type");
|
||||
if (type) {
|
||||
result += " ?=> " + this.visitor.visit(type, ctx);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
visitBaseType(node: CSTNode, ctx: PrintContext): string {
|
||||
// Handle type lambda: [X] =>> F[X]
|
||||
const typeLambda = getFirstChild(node, "typeLambda");
|
||||
if (typeLambda) {
|
||||
return this.visitor.visit(typeLambda, ctx);
|
||||
}
|
||||
|
||||
// Handle polymorphic function type: [T] => T => T
|
||||
const polymorphicFunctionType = getFirstChild(
|
||||
node,
|
||||
"polymorphicFunctionType",
|
||||
);
|
||||
if (polymorphicFunctionType) {
|
||||
return this.visitor.visit(polymorphicFunctionType, ctx);
|
||||
}
|
||||
|
||||
// Handle context function type: String ?=> Int
|
||||
const contextFunctionType = getFirstChild(node, "contextFunctionType");
|
||||
if (contextFunctionType) {
|
||||
return this.visitor.visit(contextFunctionType, ctx);
|
||||
}
|
||||
|
||||
// Handle dependent function type: (x: Int) => Vector[x.type]
|
||||
const dependentFunctionType = getFirstChild(node, "dependentFunctionType");
|
||||
if (dependentFunctionType) {
|
||||
return this.visitor.visit(dependentFunctionType, ctx);
|
||||
}
|
||||
|
||||
// Handle parenthesized types or tuple types: (String | Int) or (A, B)
|
||||
const leftParen = getChildNodes(node, "LeftParen");
|
||||
const tupleType = getFirstChild(node, "tupleTypeOrParenthesized");
|
||||
if (leftParen.length > 0 && tupleType) {
|
||||
return "(" + this.visitor.visit(tupleType, ctx) + ")";
|
||||
}
|
||||
|
||||
// Handle simple types with array notation
|
||||
const simpleType = getFirstChild(node, "simpleType");
|
||||
let result = "";
|
||||
|
||||
if (simpleType) {
|
||||
result = this.visitor.visit(simpleType, ctx);
|
||||
} else {
|
||||
// Handle direct token cases like Array, List, etc.
|
||||
if ("children" in node && node.children) {
|
||||
const children = node.children;
|
||||
for (const [key, tokens] of Object.entries(children)) {
|
||||
if (
|
||||
Array.isArray(tokens) &&
|
||||
tokens.length > 0 &&
|
||||
"image" in tokens[0]
|
||||
) {
|
||||
// Check if this is a type name token (not brackets or keywords)
|
||||
if (
|
||||
!["LeftBracket", "RightBracket", "typeArgument"].includes(key)
|
||||
) {
|
||||
result = getNodeImage(tokens[0]);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!result) {
|
||||
return "";
|
||||
}
|
||||
|
||||
// Handle array types like Array[String]
|
||||
const leftBrackets = getChildNodes(node, "LeftBracket");
|
||||
const typeArguments = getChildNodes(node, "typeArgument");
|
||||
for (let i = 0; i < leftBrackets.length && i < typeArguments.length; i++) {
|
||||
result += "[" + this.visitor.visit(typeArguments[i], ctx) + "]";
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
visitTupleTypeOrParenthesized(node: CSTNode, ctx: PrintContext): string {
|
||||
const types = getChildNodes(node, "type");
|
||||
if (types.length === 1) {
|
||||
return this.visitor.visit(types[0], ctx);
|
||||
}
|
||||
|
||||
const typeStrings = types.map((t: CSTNode) => this.visitor.visit(t, ctx));
|
||||
return typeStrings.join(", ");
|
||||
}
|
||||
|
||||
visitSimpleType(node: CSTNode, ctx: PrintContext): string {
|
||||
const qualifiedId = getFirstChild(node, "qualifiedIdentifier");
|
||||
if (!qualifiedId) {
|
||||
return "";
|
||||
}
|
||||
let result = this.visitor.visit(qualifiedId, ctx);
|
||||
|
||||
// Handle type parameters like List[Int] or Kind Projector like Map[String, *]
|
||||
const leftBrackets = getChildNodes(node, "LeftBracket");
|
||||
if (leftBrackets.length > 0) {
|
||||
const typeArgs = getChildNodes(node, "typeArgument");
|
||||
const typeStrings = typeArgs.map((t: CSTNode) =>
|
||||
this.visitor.visit(t, ctx),
|
||||
);
|
||||
result += "[" + typeStrings.join(", ") + "]";
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
visitTypeArgument(node: CSTNode, ctx: PrintContext): string {
|
||||
// Handle Kind Projector notation: *
|
||||
const star = getChildNodes(node, "Star");
|
||||
if (star.length > 0) {
|
||||
return "*";
|
||||
}
|
||||
|
||||
// Handle regular type
|
||||
const type = getFirstChild(node, "type");
|
||||
if (type) {
|
||||
return this.visitor.visit(type, ctx);
|
||||
}
|
||||
|
||||
// Handle type argument union structure
|
||||
const typeArgumentUnion = getFirstChild(node, "typeArgumentUnion");
|
||||
if (typeArgumentUnion) {
|
||||
return this.visitor.visit(typeArgumentUnion, ctx);
|
||||
}
|
||||
|
||||
// Handle direct type tokens like Array[t] within type arguments
|
||||
if ("children" in node && node.children) {
|
||||
const children = node.children;
|
||||
let result = "";
|
||||
|
||||
// Find the type name token
|
||||
for (const [key, tokens] of Object.entries(children)) {
|
||||
if (
|
||||
Array.isArray(tokens) &&
|
||||
tokens.length > 0 &&
|
||||
"image" in tokens[0]
|
||||
) {
|
||||
if (!["LeftBracket", "RightBracket", "typeArgument"].includes(key)) {
|
||||
result = getNodeImage(tokens[0]);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (result) {
|
||||
// Handle type parameters like Array[t] within type arguments
|
||||
const leftBrackets = getChildNodes(node, "LeftBracket");
|
||||
const typeArguments = getChildNodes(node, "typeArgument");
|
||||
for (
|
||||
let i = 0;
|
||||
i < leftBrackets.length && i < typeArguments.length;
|
||||
i++
|
||||
) {
|
||||
result += "[" + this.visitor.visit(typeArguments[i], ctx) + "]";
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
return "";
|
||||
}
|
||||
|
||||
visitTypeLambda(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "[";
|
||||
|
||||
const parameters = getChildNodes(node, "typeLambdaParameter");
|
||||
if (parameters.length > 0) {
|
||||
const parameterStrings = parameters.map((param: CSTNode) =>
|
||||
this.visitor.visit(param, ctx),
|
||||
);
|
||||
result += parameterStrings.join(", ");
|
||||
}
|
||||
|
||||
result += "] =>> ";
|
||||
const type = getFirstChild(node, "type");
|
||||
if (type) {
|
||||
result += this.visitor.visit(type, ctx);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
visitTypeLambdaParameter(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "";
|
||||
|
||||
// Add variance annotation if present
|
||||
const plus = getChildNodes(node, "Plus");
|
||||
const minus = getChildNodes(node, "Minus");
|
||||
if (plus.length > 0) {
|
||||
result += "+";
|
||||
} else if (minus.length > 0) {
|
||||
result += "-";
|
||||
}
|
||||
|
||||
const identifiers = getChildNodes(node, "Identifier");
|
||||
if (identifiers.length > 0) {
|
||||
result += getNodeImage(identifiers[0]);
|
||||
}
|
||||
|
||||
const subtypeOf = getChildNodes(node, "SubtypeOf");
|
||||
const supertypeOf = getChildNodes(node, "SupertypeOf");
|
||||
const type = getFirstChild(node, "type");
|
||||
|
||||
if (subtypeOf.length > 0 && type) {
|
||||
result += " <: " + this.visitor.visit(type, ctx);
|
||||
} else if (supertypeOf.length > 0 && type) {
|
||||
result += " >: " + this.visitor.visit(type, ctx);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
visitDependentFunctionType(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "(";
|
||||
|
||||
const parameters = getChildNodes(node, "dependentParameter");
|
||||
if (parameters.length > 0) {
|
||||
const parameterStrings = parameters.map((param: CSTNode) =>
|
||||
this.visitor.visit(param, ctx),
|
||||
);
|
||||
result += parameterStrings.join(", ");
|
||||
}
|
||||
|
||||
result += ") => ";
|
||||
const type = getFirstChild(node, "type");
|
||||
if (type) {
|
||||
result += this.visitor.visit(type, ctx);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
visitDependentParameter(node: CSTNode, ctx: PrintContext): string {
|
||||
const identifiers = getChildNodes(node, "Identifier");
|
||||
if (identifiers.length === 0) {
|
||||
return "";
|
||||
}
|
||||
|
||||
let result = getNodeImage(identifiers[0]);
|
||||
const type = getFirstChild(node, "type");
|
||||
if (type) {
|
||||
result += ": " + this.visitor.visit(type, ctx);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
visitPolymorphicFunctionType(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "[";
|
||||
|
||||
const parameters = getChildNodes(node, "polymorphicTypeParameter");
|
||||
if (parameters.length > 0) {
|
||||
const parameterStrings = parameters.map((param: CSTNode) =>
|
||||
this.visitor.visit(param, ctx),
|
||||
);
|
||||
result += parameterStrings.join(", ");
|
||||
}
|
||||
|
||||
result += "] => ";
|
||||
const type = getFirstChild(node, "type");
|
||||
if (type) {
|
||||
result += this.visitor.visit(type, ctx);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
visitPolymorphicTypeParameter(node: CSTNode, ctx: PrintContext): string {
|
||||
let result = "";
|
||||
|
||||
// Handle variance annotation
|
||||
const plus = getChildNodes(node, "Plus");
|
||||
const minus = getChildNodes(node, "Minus");
|
||||
if (plus.length > 0) {
|
||||
result += "+";
|
||||
} else if (minus.length > 0) {
|
||||
result += "-";
|
||||
}
|
||||
|
||||
const identifiers = getChildNodes(node, "Identifier");
|
||||
if (identifiers.length > 0) {
|
||||
result += getNodeImage(identifiers[0]);
|
||||
}
|
||||
|
||||
// Handle type bounds
|
||||
const subtypeOf = getChildNodes(node, "SubtypeOf");
|
||||
const supertypeOf = getChildNodes(node, "SupertypeOf");
|
||||
const type = getFirstChild(node, "type");
|
||||
|
||||
if (subtypeOf.length > 0 && type) {
|
||||
result += " <: " + this.visitor.visit(type, ctx);
|
||||
}
|
||||
if (supertypeOf.length > 0 && type) {
|
||||
result += " >: " + this.visitor.visit(type, ctx);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
visitTypeArgumentUnion(node: CSTNode, ctx: PrintContext): string {
|
||||
const typeArgumentIntersections = getChildNodes(
|
||||
node,
|
||||
"typeArgumentIntersection",
|
||||
);
|
||||
|
||||
if (typeArgumentIntersections.length === 1) {
|
||||
return this.visitor.visit(typeArgumentIntersections[0], ctx);
|
||||
}
|
||||
|
||||
// Handle union types with | operator
|
||||
if (typeArgumentIntersections.length > 1) {
|
||||
const typeStrings = typeArgumentIntersections.map((t: CSTNode) =>
|
||||
this.visitor.visit(t, ctx),
|
||||
);
|
||||
return typeStrings.join(" | ");
|
||||
}
|
||||
|
||||
return "";
|
||||
}
|
||||
|
||||
visitTypeArgumentIntersection(node: CSTNode, ctx: PrintContext): string {
|
||||
const typeArgumentSimples = getChildNodes(node, "typeArgumentSimple");
|
||||
|
||||
if (typeArgumentSimples.length === 1) {
|
||||
return this.visitor.visit(typeArgumentSimples[0], ctx);
|
||||
}
|
||||
|
||||
// Handle intersection types with & operator
|
||||
if (typeArgumentSimples.length > 1) {
|
||||
const typeStrings = typeArgumentSimples.map((t: CSTNode) =>
|
||||
this.visitor.visit(t, ctx),
|
||||
);
|
||||
return typeStrings.join(" & ");
|
||||
}
|
||||
|
||||
return "";
|
||||
}
|
||||
|
||||
visitTypeArgumentSimple(node: CSTNode, ctx: PrintContext): string {
|
||||
const qualifiedIdentifier = getFirstChild(node, "qualifiedIdentifier");
|
||||
if (qualifiedIdentifier) {
|
||||
let result = this.visitor.visit(qualifiedIdentifier, ctx);
|
||||
|
||||
// Handle type parameters like List[*] within type arguments
|
||||
const leftBrackets = getChildNodes(node, "LeftBracket");
|
||||
if (leftBrackets.length > 0) {
|
||||
const typeArgs = getChildNodes(node, "typeArgument");
|
||||
const typeStrings = typeArgs.map((t: CSTNode) =>
|
||||
this.visitor.visit(t, ctx),
|
||||
);
|
||||
result += "[" + typeStrings.join(", ") + "]";
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Handle simple type structures like List[*]
|
||||
const simpleType = getFirstChild(node, "simpleType");
|
||||
if (simpleType) {
|
||||
return this.visitor.visit(simpleType, ctx);
|
||||
}
|
||||
|
||||
// Handle base type structures
|
||||
const baseType = getFirstChild(node, "baseType");
|
||||
if (baseType) {
|
||||
return this.visitor.visit(baseType, ctx);
|
||||
}
|
||||
|
||||
// Handle other type argument patterns
|
||||
const identifier = getFirstChild(node, "Identifier");
|
||||
if (identifier) {
|
||||
return getNodeImage(identifier);
|
||||
}
|
||||
|
||||
return "";
|
||||
}
|
||||
}
|
||||
@@ -1,295 +0,0 @@
|
||||
import type { ScalaCstNode, IToken } from "../scala-parser";
|
||||
|
||||
/**
|
||||
* ビジターパターンで使用する共有ユーティリティとフォーマットヘルパー
|
||||
*/
|
||||
|
||||
export interface PrettierOptions {
|
||||
printWidth?: number;
|
||||
tabWidth?: number;
|
||||
useTabs?: boolean;
|
||||
semi?: boolean;
|
||||
singleQuote?: boolean;
|
||||
trailingComma?: "all" | "multiline" | "none";
|
||||
scalaLineWidth?: number; // Deprecated, for backward compatibility
|
||||
}
|
||||
|
||||
// CST要素(ノードまたはトークン)のユニオン型
|
||||
export type CSTNode = ScalaCstNode | IToken;
|
||||
|
||||
export type PrintContext = {
|
||||
path: unknown;
|
||||
options: PrettierOptions;
|
||||
print: (node: CSTNode) => string;
|
||||
indentLevel: number;
|
||||
};
|
||||
|
||||
/**
|
||||
* nullチェック付きでノードの子要素に安全にアクセス
|
||||
* @param node - 対象ノード
|
||||
* @returns 子要素のマップ
|
||||
*/
|
||||
export function getChildren(node: CSTNode): Record<string, CSTNode[]> {
|
||||
if ("children" in node && node.children) {
|
||||
return node.children as Record<string, CSTNode[]>;
|
||||
}
|
||||
return {};
|
||||
}
|
||||
|
||||
/**
|
||||
* キーで指定した子ノードを安全に取得
|
||||
* @param node - 対象ノード
|
||||
* @param key - 子ノードのキー
|
||||
* @returns 子ノードの配列
|
||||
*/
|
||||
export function getChildNodes(node: CSTNode, key: string): CSTNode[] {
|
||||
return getChildren(node)[key] || [];
|
||||
}
|
||||
|
||||
/**
|
||||
* キーで指定した最初の子ノードを安全に取得
|
||||
* @param node - 対象ノード
|
||||
* @param key - 子ノードのキー
|
||||
* @returns 最初の子ノードまたはundefined
|
||||
*/
|
||||
export function getFirstChild(node: CSTNode, key: string): CSTNode | undefined {
|
||||
const children = getChildNodes(node, key);
|
||||
return children.length > 0 ? children[0] : undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* ノードのimageプロパティを安全に取得
|
||||
* @param node - 対象ノード
|
||||
* @returns imageプロパティまたは空文字列
|
||||
*/
|
||||
export function getNodeImage(node: CSTNode): string {
|
||||
if ("image" in node && node.image) {
|
||||
return node.image;
|
||||
}
|
||||
return "";
|
||||
}
|
||||
|
||||
/**
|
||||
* nullまたはundefinedの可能性があるノードのimageを安全に取得
|
||||
* @param node - 対象ノード(null/undefined可)
|
||||
* @returns imageプロパティまたは空文字列
|
||||
*/
|
||||
export function getNodeImageSafe(node: CSTNode | undefined | null): string {
|
||||
if (node && "image" in node && node.image) {
|
||||
return node.image;
|
||||
}
|
||||
return "";
|
||||
}
|
||||
|
||||
/**
|
||||
* 有効なprintWidthを取得(scalafmt互換性をサポート)
|
||||
* @param ctx - 印刷コンテキスト
|
||||
* @returns 有効な行幅
|
||||
*/
|
||||
export function getPrintWidth(ctx: PrintContext): number {
|
||||
// PrettierのprintWidthを使用(scalafmtのmaxColumn互換)
|
||||
if (ctx.options.printWidth) {
|
||||
return ctx.options.printWidth;
|
||||
}
|
||||
|
||||
// 後方互換性のため非推奨のscalaLineWidthにフォールバック
|
||||
if (ctx.options.scalaLineWidth) {
|
||||
// 開発環境で非推奨警告を表示
|
||||
if (process.env.NODE_ENV !== "production") {
|
||||
console.warn(
|
||||
"scalaLineWidth is deprecated. Use printWidth instead for scalafmt compatibility.",
|
||||
);
|
||||
}
|
||||
return ctx.options.scalaLineWidth;
|
||||
}
|
||||
|
||||
// デフォルト値
|
||||
return 80;
|
||||
}
|
||||
|
||||
/**
|
||||
* 有効なtabWidthを取得(scalafmt互換性をサポート)
|
||||
* @param ctx - 印刷コンテキスト
|
||||
* @returns 有効なタブ幅
|
||||
*/
|
||||
export function getTabWidth(ctx: PrintContext): number {
|
||||
// PrettierのtabWidthを使用(scalafmtのindent.main互換)
|
||||
if (ctx.options.tabWidth) {
|
||||
return ctx.options.tabWidth;
|
||||
}
|
||||
|
||||
// デフォルト値
|
||||
return 2;
|
||||
}
|
||||
|
||||
/**
|
||||
* セミコロンのフォーマットを処理(Prettierのsemiオプションをサポート)
|
||||
* @param statement - ステートメント文字列
|
||||
* @param ctx - 印刷コンテキスト
|
||||
* @returns フォーマット済みのステートメント
|
||||
*/
|
||||
export function formatStatement(statement: string, ctx: PrintContext): string {
|
||||
// Prettierのsemiオプションを使用
|
||||
// プラグインはScala用にデフォルトsemi=falseを設定するが、明示的なユーザー選択を尊重
|
||||
const useSemi = ctx.options.semi === true;
|
||||
|
||||
// 既存の末尾セミコロンを削除
|
||||
const cleanStatement = statement.replace(/;\s*$/, "");
|
||||
|
||||
// リクエストされた場合セミコロンを追加
|
||||
if (useSemi) {
|
||||
return cleanStatement + ";";
|
||||
}
|
||||
|
||||
return cleanStatement;
|
||||
}
|
||||
|
||||
/**
|
||||
* 文字列クォートのフォーマットを処理(PrettierのsingleQuoteオプションをサポート)
|
||||
* @param content - 文字列リテラルの内容
|
||||
* @param ctx - 印刷コンテキスト
|
||||
* @returns フォーマット済みの文字列
|
||||
*/
|
||||
export function formatStringLiteral(
|
||||
content: string,
|
||||
ctx: PrintContext,
|
||||
): string {
|
||||
// PrettierのsingleQuoteオプションを使用
|
||||
const useSingleQuote = ctx.options.singleQuote === true;
|
||||
|
||||
// 文字列補間をスキップ(s"、f"、raw"などで始まる)
|
||||
if (content.match(/^[a-zA-Z]"/)) {
|
||||
return content; // 補間文字列は変更しない
|
||||
}
|
||||
|
||||
// 内容を抽出
|
||||
let innerContent = content;
|
||||
|
||||
if (content.startsWith('"') && content.endsWith('"')) {
|
||||
innerContent = content.slice(1, -1);
|
||||
} else if (content.startsWith("'") && content.endsWith("'")) {
|
||||
innerContent = content.slice(1, -1);
|
||||
} else {
|
||||
return content; // Not a string literal
|
||||
}
|
||||
|
||||
// Choose target quote based on option
|
||||
const targetQuote = useSingleQuote ? "'" : '"';
|
||||
|
||||
// Handle escaping if necessary
|
||||
if (targetQuote === "'") {
|
||||
// Converting to single quotes: escape single quotes, unescape double quotes
|
||||
innerContent = innerContent.replace(/\\"/g, '"').replace(/'/g, "\\'");
|
||||
} else {
|
||||
// Converting to double quotes: escape double quotes, unescape single quotes
|
||||
innerContent = innerContent.replace(/\\'/g, "'").replace(/"/g, '\\"');
|
||||
}
|
||||
|
||||
return targetQuote + innerContent + targetQuote;
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper method to handle indentation using configured tab width
|
||||
*/
|
||||
export function createIndent(level: number, ctx: PrintContext): string {
|
||||
const tabWidth = getTabWidth(ctx);
|
||||
const useTabs = ctx.options.useTabs === true;
|
||||
|
||||
if (useTabs) {
|
||||
return "\t".repeat(level);
|
||||
} else {
|
||||
return " ".repeat(level * tabWidth);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper method to handle trailing comma formatting
|
||||
*/
|
||||
export function formatTrailingComma(
|
||||
elements: string[],
|
||||
ctx: PrintContext,
|
||||
isMultiline: boolean = false,
|
||||
): string {
|
||||
if (elements.length === 0) return "";
|
||||
|
||||
const trailingComma = ctx.options.trailingComma;
|
||||
|
||||
if (
|
||||
trailingComma === "all" ||
|
||||
(trailingComma === "multiline" && isMultiline)
|
||||
) {
|
||||
return elements.join(", ") + ",";
|
||||
}
|
||||
|
||||
return elements.join(", ");
|
||||
}
|
||||
|
||||
/**
|
||||
* Attach original comments to the formatted result
|
||||
*/
|
||||
export function attachOriginalComments(
|
||||
result: string,
|
||||
originalComments: CSTNode[],
|
||||
): string {
|
||||
if (!originalComments || originalComments.length === 0) {
|
||||
return result;
|
||||
}
|
||||
|
||||
const lines = result.split("\n");
|
||||
const commentMap = new Map<number, string[]>();
|
||||
|
||||
// Group comments by line number
|
||||
originalComments.forEach((comment) => {
|
||||
const line = ("startLine" in comment && comment.startLine) || 1;
|
||||
if (!commentMap.has(line)) {
|
||||
commentMap.set(line, []);
|
||||
}
|
||||
let commentText = "";
|
||||
if ("image" in comment && comment.image) {
|
||||
commentText = comment.image;
|
||||
} else if ("value" in comment && comment.value) {
|
||||
commentText = String(comment.value);
|
||||
}
|
||||
if (commentText) {
|
||||
commentMap.get(line)!.push(commentText);
|
||||
}
|
||||
});
|
||||
|
||||
// Insert comments into lines
|
||||
const resultLines: string[] = [];
|
||||
lines.forEach((line, index) => {
|
||||
const lineNumber = index + 1;
|
||||
if (commentMap.has(lineNumber)) {
|
||||
const comments = commentMap.get(lineNumber)!;
|
||||
comments.forEach((comment) => {
|
||||
resultLines.push(comment);
|
||||
});
|
||||
}
|
||||
resultLines.push(line);
|
||||
});
|
||||
|
||||
return resultLines.join("\n");
|
||||
}
|
||||
|
||||
/**
|
||||
* Format method or class parameters with proper line breaks
|
||||
*/
|
||||
export function formatParameterList(
|
||||
parameters: CSTNode[],
|
||||
ctx: PrintContext,
|
||||
visitFn: (param: CSTNode, ctx: PrintContext) => string,
|
||||
): string {
|
||||
if (parameters.length === 0) return "";
|
||||
|
||||
const paramStrings = parameters.map((param) => visitFn(param, ctx));
|
||||
const printWidth = getPrintWidth(ctx);
|
||||
const joined = paramStrings.join(", ");
|
||||
|
||||
// If the line is too long, break into multiple lines
|
||||
if (joined.length > printWidth * 0.7) {
|
||||
const indent = createIndent(1, ctx);
|
||||
return "\n" + indent + paramStrings.join(",\n" + indent) + "\n";
|
||||
}
|
||||
|
||||
return joined;
|
||||
}
|
||||
@@ -32,33 +32,38 @@ const sqlParser: Parser<string> = {
|
||||
locEnd: (node: string) => node.length,
|
||||
};
|
||||
|
||||
// Initialize SQL Format WASM module
|
||||
// Lazy initialize SQL Format WASM module
|
||||
let initPromise: Promise<void> | null = null;
|
||||
let isInitialized = false;
|
||||
|
||||
function initSqlFmt(): Promise<void> {
|
||||
if (initPromise) {
|
||||
return initPromise;
|
||||
if (isInitialized) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
initPromise = (async () => {
|
||||
if (!isInitialized) {
|
||||
await sqlFmtInit();
|
||||
isInitialized = true;
|
||||
}
|
||||
})();
|
||||
if (!initPromise) {
|
||||
initPromise = (async () => {
|
||||
try {
|
||||
await sqlFmtInit();
|
||||
isInitialized = true;
|
||||
} catch (error) {
|
||||
console.warn('Failed to initialize SQL Format WASM module:', error);
|
||||
initPromise = null;
|
||||
throw error;
|
||||
}
|
||||
})();
|
||||
}
|
||||
|
||||
return initPromise;
|
||||
}
|
||||
|
||||
// Printer configuration
|
||||
const sqlPrinter: Printer<string> = {
|
||||
print: (path, options) => {
|
||||
// @ts-expect-error -- Support async printer like shell plugin
|
||||
async print(path, options) {
|
||||
try {
|
||||
if (!isInitialized) {
|
||||
console.warn('SQL Format WASM module not initialized, returning original text');
|
||||
return (path as any).getValue ? (path as any).getValue() : path.node;
|
||||
}
|
||||
// Wait for initialization to complete
|
||||
await initSqlFmt();
|
||||
|
||||
const text = (path as any).getValue ? (path as any).getValue() : path.node;
|
||||
const config = getSqlFmtConfig(options);
|
||||
@@ -129,11 +134,6 @@ const sqlPlugin: Plugin = {
|
||||
options,
|
||||
};
|
||||
|
||||
// Initialize the WASM module
|
||||
initSqlFmt().catch(error => {
|
||||
console.error('Failed to initialize SQL Format WASM module:', error);
|
||||
});
|
||||
|
||||
export default sqlPlugin;
|
||||
export { languages };
|
||||
export const parsers = sqlPlugin.parsers;
|
||||
|
||||
@@ -20,19 +20,30 @@ const webParser: Parser<string> = {
|
||||
locEnd: (text: string) => text.length,
|
||||
};
|
||||
|
||||
// Initialize web_fmt WASM module
|
||||
// Lazy initialize web_fmt WASM module
|
||||
let processorInstance: any = null;
|
||||
let initPromise: Promise<any> | null = null;
|
||||
|
||||
const getProcessorInstance = async () => {
|
||||
if (!processorInstance) {
|
||||
try {
|
||||
await webInit();
|
||||
processorInstance = { initialized: true };
|
||||
} catch (error) {
|
||||
console.warn('Failed to initialize web_fmt WASM module:', error);
|
||||
processorInstance = null;
|
||||
}
|
||||
if (!processorInstance && !initPromise) {
|
||||
initPromise = (async () => {
|
||||
try {
|
||||
await webInit();
|
||||
processorInstance = { initialized: true };
|
||||
return processorInstance;
|
||||
} catch (error) {
|
||||
console.warn('Failed to initialize web_fmt WASM module:', error);
|
||||
processorInstance = null;
|
||||
initPromise = null;
|
||||
throw error;
|
||||
}
|
||||
})();
|
||||
}
|
||||
|
||||
if (initPromise) {
|
||||
return await initPromise;
|
||||
}
|
||||
|
||||
return processorInstance;
|
||||
};
|
||||
|
||||
|
||||
Reference in New Issue
Block a user