Project files

2023-11-09 18:47:11 +01:00
parent 695abe054b
commit c415135aae
8554 changed files with 858111 additions and 0 deletions

View File

@@ -0,0 +1,602 @@
import { Source } from './source';
import { TokenKindEnum } from './tokenKind';
/**
* Contains a range of UTF-8 character offsets and token references that
* identify the region of the source from which the AST derived.
*/
export class Location {
/**
* The character offset at which this Node begins.
*/
readonly start: number;
/**
* The character offset at which this Node ends.
*/
readonly end: number;
/**
* The Token at which this Node begins.
*/
readonly startToken: Token;
/**
* The Token at which this Node ends.
*/
readonly endToken: Token;
/**
* The Source document the AST represents.
*/
readonly source: Source;
constructor(startToken: Token, endToken: Token, source: Source);
toJSON(): { start: number; end: number };
}
/**
* Represents a range of characters represented by a lexical token
* within a Source.
*/
export class Token {
/**
* The kind of Token.
*/
readonly kind: TokenKindEnum;
/**
* The character offset at which this Node begins.
*/
readonly start: number;
/**
* The character offset at which this Node ends.
*/
readonly end: number;
/**
* The 1-indexed line number on which this Token appears.
*/
readonly line: number;
/**
* The 1-indexed column number at which this Token begins.
*/
readonly column: number;
/**
* For non-punctuation tokens, represents the interpreted value of the token.
*/
readonly value: string | undefined;
/**
* Tokens exist as nodes in a double-linked-list amongst all tokens
* including ignored tokens. <SOF> is always the first node and <EOF>
* the last.
*/
readonly prev: Token | null;
readonly next: Token | null;
constructor(
kind: TokenKindEnum,
start: number,
end: number,
line: number,
column: number,
prev: Token | null,
value?: string,
);
toJSON(): {
kind: TokenKindEnum;
value: string | undefined;
line: number;
column: number;
};
}
/**
* @internal
*/
export function isNode(maybeNode: any): maybeNode is ASTNode;
/**
* The list of all possible AST node types.
*/
export type ASTNode =
| NameNode
| DocumentNode
| OperationDefinitionNode
| VariableDefinitionNode
| VariableNode
| SelectionSetNode
| FieldNode
| ArgumentNode
| FragmentSpreadNode
| InlineFragmentNode
| FragmentDefinitionNode
| IntValueNode
| FloatValueNode
| StringValueNode
| BooleanValueNode
| NullValueNode
| EnumValueNode
| ListValueNode
| ObjectValueNode
| ObjectFieldNode
| DirectiveNode
| NamedTypeNode
| ListTypeNode
| NonNullTypeNode
| SchemaDefinitionNode
| OperationTypeDefinitionNode
| ScalarTypeDefinitionNode
| ObjectTypeDefinitionNode
| FieldDefinitionNode
| InputValueDefinitionNode
| InterfaceTypeDefinitionNode
| UnionTypeDefinitionNode
| EnumTypeDefinitionNode
| EnumValueDefinitionNode
| InputObjectTypeDefinitionNode
| DirectiveDefinitionNode
| SchemaExtensionNode
| ScalarTypeExtensionNode
| ObjectTypeExtensionNode
| InterfaceTypeExtensionNode
| UnionTypeExtensionNode
| EnumTypeExtensionNode
| InputObjectTypeExtensionNode;
/**
* Utility type listing all nodes indexed by their kind.
*/
export interface ASTKindToNode {
Name: NameNode;
Document: DocumentNode;
OperationDefinition: OperationDefinitionNode;
VariableDefinition: VariableDefinitionNode;
Variable: VariableNode;
SelectionSet: SelectionSetNode;
Field: FieldNode;
Argument: ArgumentNode;
FragmentSpread: FragmentSpreadNode;
InlineFragment: InlineFragmentNode;
FragmentDefinition: FragmentDefinitionNode;
IntValue: IntValueNode;
FloatValue: FloatValueNode;
StringValue: StringValueNode;
BooleanValue: BooleanValueNode;
NullValue: NullValueNode;
EnumValue: EnumValueNode;
ListValue: ListValueNode;
ObjectValue: ObjectValueNode;
ObjectField: ObjectFieldNode;
Directive: DirectiveNode;
NamedType: NamedTypeNode;
ListType: ListTypeNode;
NonNullType: NonNullTypeNode;
SchemaDefinition: SchemaDefinitionNode;
OperationTypeDefinition: OperationTypeDefinitionNode;
ScalarTypeDefinition: ScalarTypeDefinitionNode;
ObjectTypeDefinition: ObjectTypeDefinitionNode;
FieldDefinition: FieldDefinitionNode;
InputValueDefinition: InputValueDefinitionNode;
InterfaceTypeDefinition: InterfaceTypeDefinitionNode;
UnionTypeDefinition: UnionTypeDefinitionNode;
EnumTypeDefinition: EnumTypeDefinitionNode;
EnumValueDefinition: EnumValueDefinitionNode;
InputObjectTypeDefinition: InputObjectTypeDefinitionNode;
DirectiveDefinition: DirectiveDefinitionNode;
SchemaExtension: SchemaExtensionNode;
ScalarTypeExtension: ScalarTypeExtensionNode;
ObjectTypeExtension: ObjectTypeExtensionNode;
InterfaceTypeExtension: InterfaceTypeExtensionNode;
UnionTypeExtension: UnionTypeExtensionNode;
EnumTypeExtension: EnumTypeExtensionNode;
InputObjectTypeExtension: InputObjectTypeExtensionNode;
}
// Name
export interface NameNode {
readonly kind: 'Name';
readonly loc?: Location;
readonly value: string;
}
// Document
export interface DocumentNode {
readonly kind: 'Document';
readonly loc?: Location;
readonly definitions: ReadonlyArray<DefinitionNode>;
}
export type DefinitionNode =
| ExecutableDefinitionNode
| TypeSystemDefinitionNode
| TypeSystemExtensionNode;
export type ExecutableDefinitionNode =
| OperationDefinitionNode
| FragmentDefinitionNode;
export interface OperationDefinitionNode {
readonly kind: 'OperationDefinition';
readonly loc?: Location;
readonly operation: OperationTypeNode;
readonly name?: NameNode;
readonly variableDefinitions?: ReadonlyArray<VariableDefinitionNode>;
readonly directives?: ReadonlyArray<DirectiveNode>;
readonly selectionSet: SelectionSetNode;
}
export type OperationTypeNode = 'query' | 'mutation' | 'subscription';
export interface VariableDefinitionNode {
readonly kind: 'VariableDefinition';
readonly loc?: Location;
readonly variable: VariableNode;
readonly type: TypeNode;
readonly defaultValue?: ValueNode;
readonly directives?: ReadonlyArray<DirectiveNode>;
}
export interface VariableNode {
readonly kind: 'Variable';
readonly loc?: Location;
readonly name: NameNode;
}
export interface SelectionSetNode {
kind: 'SelectionSet';
loc?: Location;
selections: ReadonlyArray<SelectionNode>;
}
export type SelectionNode = FieldNode | FragmentSpreadNode | InlineFragmentNode;
export interface FieldNode {
readonly kind: 'Field';
readonly loc?: Location;
readonly alias?: NameNode;
readonly name: NameNode;
readonly arguments?: ReadonlyArray<ArgumentNode>;
readonly directives?: ReadonlyArray<DirectiveNode>;
readonly selectionSet?: SelectionSetNode;
}
export interface ArgumentNode {
readonly kind: 'Argument';
readonly loc?: Location;
readonly name: NameNode;
readonly value: ValueNode;
}
// Fragments
export interface FragmentSpreadNode {
readonly kind: 'FragmentSpread';
readonly loc?: Location;
readonly name: NameNode;
readonly directives?: ReadonlyArray<DirectiveNode>;
}
export interface InlineFragmentNode {
readonly kind: 'InlineFragment';
readonly loc?: Location;
readonly typeCondition?: NamedTypeNode;
readonly directives?: ReadonlyArray<DirectiveNode>;
readonly selectionSet: SelectionSetNode;
}
export interface FragmentDefinitionNode {
readonly kind: 'FragmentDefinition';
readonly loc?: Location;
readonly name: NameNode;
// Note: fragment variable definitions are experimental and may be changed
// or removed in the future.
readonly variableDefinitions?: ReadonlyArray<VariableDefinitionNode>;
readonly typeCondition: NamedTypeNode;
readonly directives?: ReadonlyArray<DirectiveNode>;
readonly selectionSet: SelectionSetNode;
}
// Values
export type ValueNode =
| VariableNode
| IntValueNode
| FloatValueNode
| StringValueNode
| BooleanValueNode
| NullValueNode
| EnumValueNode
| ListValueNode
| ObjectValueNode;
export interface IntValueNode {
readonly kind: 'IntValue';
readonly loc?: Location;
readonly value: string;
}
export interface FloatValueNode {
readonly kind: 'FloatValue';
readonly loc?: Location;
readonly value: string;
}
export interface StringValueNode {
readonly kind: 'StringValue';
readonly loc?: Location;
readonly value: string;
readonly block?: boolean;
}
export interface BooleanValueNode {
readonly kind: 'BooleanValue';
readonly loc?: Location;
readonly value: boolean;
}
export interface NullValueNode {
readonly kind: 'NullValue';
readonly loc?: Location;
}
export interface EnumValueNode {
readonly kind: 'EnumValue';
readonly loc?: Location;
readonly value: string;
}
export interface ListValueNode {
readonly kind: 'ListValue';
readonly loc?: Location;
readonly values: ReadonlyArray<ValueNode>;
}
export interface ObjectValueNode {
readonly kind: 'ObjectValue';
readonly loc?: Location;
readonly fields: ReadonlyArray<ObjectFieldNode>;
}
export interface ObjectFieldNode {
readonly kind: 'ObjectField';
readonly loc?: Location;
readonly name: NameNode;
readonly value: ValueNode;
}
// Directives
export interface DirectiveNode {
readonly kind: 'Directive';
readonly loc?: Location;
readonly name: NameNode;
readonly arguments?: ReadonlyArray<ArgumentNode>;
}
// Type Reference
export type TypeNode = NamedTypeNode | ListTypeNode | NonNullTypeNode;
export interface NamedTypeNode {
readonly kind: 'NamedType';
readonly loc?: Location;
readonly name: NameNode;
}
export interface ListTypeNode {
readonly kind: 'ListType';
readonly loc?: Location;
readonly type: TypeNode;
}
export interface NonNullTypeNode {
readonly kind: 'NonNullType';
readonly loc?: Location;
readonly type: NamedTypeNode | ListTypeNode;
}
// Type System Definition
export type TypeSystemDefinitionNode =
| SchemaDefinitionNode
| TypeDefinitionNode
| DirectiveDefinitionNode;
export interface SchemaDefinitionNode {
readonly kind: 'SchemaDefinition';
readonly loc?: Location;
readonly description?: StringValueNode;
readonly directives?: ReadonlyArray<DirectiveNode>;
readonly operationTypes: ReadonlyArray<OperationTypeDefinitionNode>;
}
export interface OperationTypeDefinitionNode {
readonly kind: 'OperationTypeDefinition';
readonly loc?: Location;
readonly operation: OperationTypeNode;
readonly type: NamedTypeNode;
}
// Type Definition
export type TypeDefinitionNode =
| ScalarTypeDefinitionNode
| ObjectTypeDefinitionNode
| InterfaceTypeDefinitionNode
| UnionTypeDefinitionNode
| EnumTypeDefinitionNode
| InputObjectTypeDefinitionNode;
export interface ScalarTypeDefinitionNode {
readonly kind: 'ScalarTypeDefinition';
readonly loc?: Location;
readonly description?: StringValueNode;
readonly name: NameNode;
readonly directives?: ReadonlyArray<DirectiveNode>;
}
export interface ObjectTypeDefinitionNode {
readonly kind: 'ObjectTypeDefinition';
readonly loc?: Location;
readonly description?: StringValueNode;
readonly name: NameNode;
readonly interfaces?: ReadonlyArray<NamedTypeNode>;
readonly directives?: ReadonlyArray<DirectiveNode>;
readonly fields?: ReadonlyArray<FieldDefinitionNode>;
}
export interface FieldDefinitionNode {
readonly kind: 'FieldDefinition';
readonly loc?: Location;
readonly description?: StringValueNode;
readonly name: NameNode;
readonly arguments?: ReadonlyArray<InputValueDefinitionNode>;
readonly type: TypeNode;
readonly directives?: ReadonlyArray<DirectiveNode>;
}
export interface InputValueDefinitionNode {
readonly kind: 'InputValueDefinition';
readonly loc?: Location;
readonly description?: StringValueNode;
readonly name: NameNode;
readonly type: TypeNode;
readonly defaultValue?: ValueNode;
readonly directives?: ReadonlyArray<DirectiveNode>;
}
export interface InterfaceTypeDefinitionNode {
readonly kind: 'InterfaceTypeDefinition';
readonly loc?: Location;
readonly description?: StringValueNode;
readonly name: NameNode;
readonly interfaces?: ReadonlyArray<NamedTypeNode>;
readonly directives?: ReadonlyArray<DirectiveNode>;
readonly fields?: ReadonlyArray<FieldDefinitionNode>;
}
export interface UnionTypeDefinitionNode {
readonly kind: 'UnionTypeDefinition';
readonly loc?: Location;
readonly description?: StringValueNode;
readonly name: NameNode;
readonly directives?: ReadonlyArray<DirectiveNode>;
readonly types?: ReadonlyArray<NamedTypeNode>;
}
export interface EnumTypeDefinitionNode {
readonly kind: 'EnumTypeDefinition';
readonly loc?: Location;
readonly description?: StringValueNode;
readonly name: NameNode;
readonly directives?: ReadonlyArray<DirectiveNode>;
readonly values?: ReadonlyArray<EnumValueDefinitionNode>;
}
export interface EnumValueDefinitionNode {
readonly kind: 'EnumValueDefinition';
readonly loc?: Location;
readonly description?: StringValueNode;
readonly name: NameNode;
readonly directives?: ReadonlyArray<DirectiveNode>;
}
export interface InputObjectTypeDefinitionNode {
readonly kind: 'InputObjectTypeDefinition';
readonly loc?: Location;
readonly description?: StringValueNode;
readonly name: NameNode;
readonly directives?: ReadonlyArray<DirectiveNode>;
readonly fields?: ReadonlyArray<InputValueDefinitionNode>;
}
// Directive Definitions
export interface DirectiveDefinitionNode {
readonly kind: 'DirectiveDefinition';
readonly loc?: Location;
readonly description?: StringValueNode;
readonly name: NameNode;
readonly arguments?: ReadonlyArray<InputValueDefinitionNode>;
readonly repeatable: boolean;
readonly locations: ReadonlyArray<NameNode>;
}
// Type System Extensions
export type TypeSystemExtensionNode = SchemaExtensionNode | TypeExtensionNode;
export interface SchemaExtensionNode {
readonly kind: 'SchemaExtension';
readonly loc?: Location;
readonly directives?: ReadonlyArray<DirectiveNode>;
readonly operationTypes?: ReadonlyArray<OperationTypeDefinitionNode>;
}
// Type Extensions
export type TypeExtensionNode =
| ScalarTypeExtensionNode
| ObjectTypeExtensionNode
| InterfaceTypeExtensionNode
| UnionTypeExtensionNode
| EnumTypeExtensionNode
| InputObjectTypeExtensionNode;
export interface ScalarTypeExtensionNode {
readonly kind: 'ScalarTypeExtension';
readonly loc?: Location;
readonly name: NameNode;
readonly directives?: ReadonlyArray<DirectiveNode>;
}
export interface ObjectTypeExtensionNode {
readonly kind: 'ObjectTypeExtension';
readonly loc?: Location;
readonly name: NameNode;
readonly interfaces?: ReadonlyArray<NamedTypeNode>;
readonly directives?: ReadonlyArray<DirectiveNode>;
readonly fields?: ReadonlyArray<FieldDefinitionNode>;
}
export interface InterfaceTypeExtensionNode {
readonly kind: 'InterfaceTypeExtension';
readonly loc?: Location;
readonly name: NameNode;
readonly interfaces?: ReadonlyArray<NamedTypeNode>;
readonly directives?: ReadonlyArray<DirectiveNode>;
readonly fields?: ReadonlyArray<FieldDefinitionNode>;
}
export interface UnionTypeExtensionNode {
readonly kind: 'UnionTypeExtension';
readonly loc?: Location;
readonly name: NameNode;
readonly directives?: ReadonlyArray<DirectiveNode>;
readonly types?: ReadonlyArray<NamedTypeNode>;
}
export interface EnumTypeExtensionNode {
readonly kind: 'EnumTypeExtension';
readonly loc?: Location;
readonly name: NameNode;
readonly directives?: ReadonlyArray<DirectiveNode>;
readonly values?: ReadonlyArray<EnumValueDefinitionNode>;
}
export interface InputObjectTypeExtensionNode {
readonly kind: 'InputObjectTypeExtension';
readonly loc?: Location;
readonly name: NameNode;
readonly directives?: ReadonlyArray<DirectiveNode>;
readonly fields?: ReadonlyArray<InputValueDefinitionNode>;
}
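A minimal usage sketch, assuming the declarations above are importable as './ast' (the module path is an assumption): the ASTKindToNode map pairs each kind string with its node interface, so one generic guard can narrow any ASTNode by kind.

import type { ASTNode, ASTKindToNode, FieldNode } from './ast'; // path assumed

// Narrow an arbitrary AST node to the interface registered for `kind`.
function isKind<K extends keyof ASTKindToNode>(
  node: ASTNode,
  kind: K,
): node is ASTKindToNode[K] {
  return node.kind === kind;
}

declare const someNode: ASTNode;
if (isKind(someNode, 'Field')) {
  // Narrowed to FieldNode: alias, arguments, selectionSet, etc. are available.
  const field: FieldNode = someNode;
}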

View File

@@ -0,0 +1,132 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.isNode = isNode;
exports.Token = exports.Location = void 0;
var _defineInspect = _interopRequireDefault(require("../jsutils/defineInspect.js"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
* Contains a range of UTF-8 character offsets and token references that
* identify the region of the source from which the AST derived.
*/
var Location = /*#__PURE__*/function () {
/**
* The character offset at which this Node begins.
*/
/**
* The character offset at which this Node ends.
*/
/**
* The Token at which this Node begins.
*/
/**
* The Token at which this Node ends.
*/
/**
* The Source document the AST represents.
*/
function Location(startToken, endToken, source) {
this.start = startToken.start;
this.end = endToken.end;
this.startToken = startToken;
this.endToken = endToken;
this.source = source;
}
var _proto = Location.prototype;
_proto.toJSON = function toJSON() {
return {
start: this.start,
end: this.end
};
};
return Location;
}(); // Print a simplified form when appearing in `inspect` and `util.inspect`.
exports.Location = Location;
(0, _defineInspect.default)(Location);
/**
* Represents a range of characters represented by a lexical token
* within a Source.
*/
var Token = /*#__PURE__*/function () {
/**
* The kind of Token.
*/
/**
* The character offset at which this Node begins.
*/
/**
* The character offset at which this Node ends.
*/
/**
* The 1-indexed line number on which this Token appears.
*/
/**
* The 1-indexed column number at which this Token begins.
*/
/**
* For non-punctuation tokens, represents the interpreted value of the token.
*/
/**
* Tokens exist as nodes in a double-linked-list amongst all tokens
* including ignored tokens. <SOF> is always the first node and <EOF>
* the last.
*/
function Token(kind, start, end, line, column, prev, value) {
this.kind = kind;
this.start = start;
this.end = end;
this.line = line;
this.column = column;
this.value = value;
this.prev = prev;
this.next = null;
}
var _proto2 = Token.prototype;
_proto2.toJSON = function toJSON() {
return {
kind: this.kind,
value: this.value,
line: this.line,
column: this.column
};
};
return Token;
}(); // Print a simplified form when appearing in `inspect` and `util.inspect`.
exports.Token = Token;
(0, _defineInspect.default)(Token);
/**
* @internal
*/
function isNode(maybeNode) {
return maybeNode != null && typeof maybeNode.kind === 'string';
}
/**
* The list of all possible AST node types.
*/
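A minimal runtime sketch, assuming the module above is importable as './ast' (the path is an assumption): Token instances serialize through toJSON, and isNode only checks for a string `kind` property.

import { Token, isNode } from './ast'; // path assumed

// A lexer would normally create these; '<SOF>' is the start-of-file token kind.
const sof = new Token('<SOF>', 0, 0, 0, 0, null);
console.log(sof.toJSON()); // { kind: '<SOF>', value: undefined, line: 0, column: 0 }

// isNode is a shallow structural check.
console.log(isNode({ kind: 'Name', value: 'hero' })); // true
console.log(isNode('Name'));                          // false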

View File

@@ -0,0 +1,637 @@
// @flow strict
import defineInspect from '../jsutils/defineInspect';
import type { Source } from './source';
import type { TokenKindEnum } from './tokenKind';
/**
* Contains a range of UTF-8 character offsets and token references that
* identify the region of the source from which the AST derived.
*/
export class Location {
/**
* The character offset at which this Node begins.
*/
+start: number;
/**
* The character offset at which this Node ends.
*/
+end: number;
/**
* The Token at which this Node begins.
*/
+startToken: Token;
/**
* The Token at which this Node ends.
*/
+endToken: Token;
/**
* The Source document the AST represents.
*/
+source: Source;
constructor(startToken: Token, endToken: Token, source: Source) {
this.start = startToken.start;
this.end = endToken.end;
this.startToken = startToken;
this.endToken = endToken;
this.source = source;
}
toJSON(): {| start: number, end: number |} {
return { start: this.start, end: this.end };
}
}
// Print a simplified form when appearing in `inspect` and `util.inspect`.
defineInspect(Location);
/**
* Represents a range of characters represented by a lexical token
* within a Source.
*/
export class Token {
/**
* The kind of Token.
*/
+kind: TokenKindEnum;
/**
* The character offset at which this Node begins.
*/
+start: number;
/**
* The character offset at which this Node ends.
*/
+end: number;
/**
* The 1-indexed line number on which this Token appears.
*/
+line: number;
/**
* The 1-indexed column number at which this Token begins.
*/
+column: number;
/**
* For non-punctuation tokens, represents the interpreted value of the token.
*/
+value: string | void;
/**
* Tokens exist as nodes in a double-linked-list amongst all tokens
* including ignored tokens. <SOF> is always the first node and <EOF>
* the last.
*/
+prev: Token | null;
+next: Token | null;
constructor(
kind: TokenKindEnum,
start: number,
end: number,
line: number,
column: number,
prev: Token | null,
value?: string,
) {
this.kind = kind;
this.start = start;
this.end = end;
this.line = line;
this.column = column;
this.value = value;
this.prev = prev;
this.next = null;
}
toJSON(): {|
kind: TokenKindEnum,
value: string | void,
line: number,
column: number,
|} {
return {
kind: this.kind,
value: this.value,
line: this.line,
column: this.column,
};
}
}
// Print a simplified form when appearing in `inspect` and `util.inspect`.
defineInspect(Token);
/**
* @internal
*/
export function isNode(maybeNode: mixed): boolean %checks {
return maybeNode != null && typeof maybeNode.kind === 'string';
}
/**
* The list of all possible AST node types.
*/
export type ASTNode =
| NameNode
| DocumentNode
| OperationDefinitionNode
| VariableDefinitionNode
| VariableNode
| SelectionSetNode
| FieldNode
| ArgumentNode
| FragmentSpreadNode
| InlineFragmentNode
| FragmentDefinitionNode
| IntValueNode
| FloatValueNode
| StringValueNode
| BooleanValueNode
| NullValueNode
| EnumValueNode
| ListValueNode
| ObjectValueNode
| ObjectFieldNode
| DirectiveNode
| NamedTypeNode
| ListTypeNode
| NonNullTypeNode
| SchemaDefinitionNode
| OperationTypeDefinitionNode
| ScalarTypeDefinitionNode
| ObjectTypeDefinitionNode
| FieldDefinitionNode
| InputValueDefinitionNode
| InterfaceTypeDefinitionNode
| UnionTypeDefinitionNode
| EnumTypeDefinitionNode
| EnumValueDefinitionNode
| InputObjectTypeDefinitionNode
| DirectiveDefinitionNode
| SchemaExtensionNode
| ScalarTypeExtensionNode
| ObjectTypeExtensionNode
| InterfaceTypeExtensionNode
| UnionTypeExtensionNode
| EnumTypeExtensionNode
| InputObjectTypeExtensionNode;
/**
* Utility type listing all nodes indexed by their kind.
*/
export type ASTKindToNode = {|
Name: NameNode,
Document: DocumentNode,
OperationDefinition: OperationDefinitionNode,
VariableDefinition: VariableDefinitionNode,
Variable: VariableNode,
SelectionSet: SelectionSetNode,
Field: FieldNode,
Argument: ArgumentNode,
FragmentSpread: FragmentSpreadNode,
InlineFragment: InlineFragmentNode,
FragmentDefinition: FragmentDefinitionNode,
IntValue: IntValueNode,
FloatValue: FloatValueNode,
StringValue: StringValueNode,
BooleanValue: BooleanValueNode,
NullValue: NullValueNode,
EnumValue: EnumValueNode,
ListValue: ListValueNode,
ObjectValue: ObjectValueNode,
ObjectField: ObjectFieldNode,
Directive: DirectiveNode,
NamedType: NamedTypeNode,
ListType: ListTypeNode,
NonNullType: NonNullTypeNode,
SchemaDefinition: SchemaDefinitionNode,
OperationTypeDefinition: OperationTypeDefinitionNode,
ScalarTypeDefinition: ScalarTypeDefinitionNode,
ObjectTypeDefinition: ObjectTypeDefinitionNode,
FieldDefinition: FieldDefinitionNode,
InputValueDefinition: InputValueDefinitionNode,
InterfaceTypeDefinition: InterfaceTypeDefinitionNode,
UnionTypeDefinition: UnionTypeDefinitionNode,
EnumTypeDefinition: EnumTypeDefinitionNode,
EnumValueDefinition: EnumValueDefinitionNode,
InputObjectTypeDefinition: InputObjectTypeDefinitionNode,
DirectiveDefinition: DirectiveDefinitionNode,
SchemaExtension: SchemaExtensionNode,
ScalarTypeExtension: ScalarTypeExtensionNode,
ObjectTypeExtension: ObjectTypeExtensionNode,
InterfaceTypeExtension: InterfaceTypeExtensionNode,
UnionTypeExtension: UnionTypeExtensionNode,
EnumTypeExtension: EnumTypeExtensionNode,
InputObjectTypeExtension: InputObjectTypeExtensionNode,
|};
// Name
export type NameNode = {|
+kind: 'Name',
+loc?: Location,
+value: string,
|};
// Document
export type DocumentNode = {|
+kind: 'Document',
+loc?: Location,
+definitions: $ReadOnlyArray<DefinitionNode>,
|};
export type DefinitionNode =
| ExecutableDefinitionNode
| TypeSystemDefinitionNode
| TypeSystemExtensionNode;
export type ExecutableDefinitionNode =
| OperationDefinitionNode
| FragmentDefinitionNode;
export type OperationDefinitionNode = {|
+kind: 'OperationDefinition',
+loc?: Location,
+operation: OperationTypeNode,
+name?: NameNode,
+variableDefinitions?: $ReadOnlyArray<VariableDefinitionNode>,
+directives?: $ReadOnlyArray<DirectiveNode>,
+selectionSet: SelectionSetNode,
|};
export type OperationTypeNode = 'query' | 'mutation' | 'subscription';
export type VariableDefinitionNode = {|
+kind: 'VariableDefinition',
+loc?: Location,
+variable: VariableNode,
+type: TypeNode,
+defaultValue?: ValueNode,
+directives?: $ReadOnlyArray<DirectiveNode>,
|};
export type VariableNode = {|
+kind: 'Variable',
+loc?: Location,
+name: NameNode,
|};
export type SelectionSetNode = {|
kind: 'SelectionSet',
loc?: Location,
selections: $ReadOnlyArray<SelectionNode>,
|};
export type SelectionNode = FieldNode | FragmentSpreadNode | InlineFragmentNode;
export type FieldNode = {|
+kind: 'Field',
+loc?: Location,
+alias?: NameNode,
+name: NameNode,
+arguments?: $ReadOnlyArray<ArgumentNode>,
+directives?: $ReadOnlyArray<DirectiveNode>,
+selectionSet?: SelectionSetNode,
|};
export type ArgumentNode = {|
+kind: 'Argument',
+loc?: Location,
+name: NameNode,
+value: ValueNode,
|};
// Fragments
export type FragmentSpreadNode = {|
+kind: 'FragmentSpread',
+loc?: Location,
+name: NameNode,
+directives?: $ReadOnlyArray<DirectiveNode>,
|};
export type InlineFragmentNode = {|
+kind: 'InlineFragment',
+loc?: Location,
+typeCondition?: NamedTypeNode,
+directives?: $ReadOnlyArray<DirectiveNode>,
+selectionSet: SelectionSetNode,
|};
export type FragmentDefinitionNode = {|
+kind: 'FragmentDefinition',
+loc?: Location,
+name: NameNode,
// Note: fragment variable definitions are experimental and may be changed
// or removed in the future.
+variableDefinitions?: $ReadOnlyArray<VariableDefinitionNode>,
+typeCondition: NamedTypeNode,
+directives?: $ReadOnlyArray<DirectiveNode>,
+selectionSet: SelectionSetNode,
|};
// Values
export type ValueNode =
| VariableNode
| IntValueNode
| FloatValueNode
| StringValueNode
| BooleanValueNode
| NullValueNode
| EnumValueNode
| ListValueNode
| ObjectValueNode;
export type IntValueNode = {|
+kind: 'IntValue',
+loc?: Location,
+value: string,
|};
export type FloatValueNode = {|
+kind: 'FloatValue',
+loc?: Location,
+value: string,
|};
export type StringValueNode = {|
+kind: 'StringValue',
+loc?: Location,
+value: string,
+block?: boolean,
|};
export type BooleanValueNode = {|
+kind: 'BooleanValue',
+loc?: Location,
+value: boolean,
|};
export type NullValueNode = {|
+kind: 'NullValue',
+loc?: Location,
|};
export type EnumValueNode = {|
+kind: 'EnumValue',
+loc?: Location,
+value: string,
|};
export type ListValueNode = {|
+kind: 'ListValue',
+loc?: Location,
+values: $ReadOnlyArray<ValueNode>,
|};
export type ObjectValueNode = {|
+kind: 'ObjectValue',
+loc?: Location,
+fields: $ReadOnlyArray<ObjectFieldNode>,
|};
export type ObjectFieldNode = {|
+kind: 'ObjectField',
+loc?: Location,
+name: NameNode,
+value: ValueNode,
|};
// Directives
export type DirectiveNode = {|
+kind: 'Directive',
+loc?: Location,
+name: NameNode,
+arguments?: $ReadOnlyArray<ArgumentNode>,
|};
// Type Reference
export type TypeNode = NamedTypeNode | ListTypeNode | NonNullTypeNode;
export type NamedTypeNode = {|
+kind: 'NamedType',
+loc?: Location,
+name: NameNode,
|};
export type ListTypeNode = {|
+kind: 'ListType',
+loc?: Location,
+type: TypeNode,
|};
export type NonNullTypeNode = {|
+kind: 'NonNullType',
+loc?: Location,
+type: NamedTypeNode | ListTypeNode,
|};
// Type System Definition
export type TypeSystemDefinitionNode =
| SchemaDefinitionNode
| TypeDefinitionNode
| DirectiveDefinitionNode;
export type SchemaDefinitionNode = {|
+kind: 'SchemaDefinition',
+loc?: Location,
+description?: StringValueNode,
+directives?: $ReadOnlyArray<DirectiveNode>,
+operationTypes: $ReadOnlyArray<OperationTypeDefinitionNode>,
|};
export type OperationTypeDefinitionNode = {|
+kind: 'OperationTypeDefinition',
+loc?: Location,
+operation: OperationTypeNode,
+type: NamedTypeNode,
|};
// Type Definition
export type TypeDefinitionNode =
| ScalarTypeDefinitionNode
| ObjectTypeDefinitionNode
| InterfaceTypeDefinitionNode
| UnionTypeDefinitionNode
| EnumTypeDefinitionNode
| InputObjectTypeDefinitionNode;
export type ScalarTypeDefinitionNode = {|
+kind: 'ScalarTypeDefinition',
+loc?: Location,
+description?: StringValueNode,
+name: NameNode,
+directives?: $ReadOnlyArray<DirectiveNode>,
|};
export type ObjectTypeDefinitionNode = {|
+kind: 'ObjectTypeDefinition',
+loc?: Location,
+description?: StringValueNode,
+name: NameNode,
+interfaces?: $ReadOnlyArray<NamedTypeNode>,
+directives?: $ReadOnlyArray<DirectiveNode>,
+fields?: $ReadOnlyArray<FieldDefinitionNode>,
|};
export type FieldDefinitionNode = {|
+kind: 'FieldDefinition',
+loc?: Location,
+description?: StringValueNode,
+name: NameNode,
+arguments?: $ReadOnlyArray<InputValueDefinitionNode>,
+type: TypeNode,
+directives?: $ReadOnlyArray<DirectiveNode>,
|};
export type InputValueDefinitionNode = {|
+kind: 'InputValueDefinition',
+loc?: Location,
+description?: StringValueNode,
+name: NameNode,
+type: TypeNode,
+defaultValue?: ValueNode,
+directives?: $ReadOnlyArray<DirectiveNode>,
|};
export type InterfaceTypeDefinitionNode = {|
+kind: 'InterfaceTypeDefinition',
+loc?: Location,
+description?: StringValueNode,
+name: NameNode,
+interfaces?: $ReadOnlyArray<NamedTypeNode>,
+directives?: $ReadOnlyArray<DirectiveNode>,
+fields?: $ReadOnlyArray<FieldDefinitionNode>,
|};
export type UnionTypeDefinitionNode = {|
+kind: 'UnionTypeDefinition',
+loc?: Location,
+description?: StringValueNode,
+name: NameNode,
+directives?: $ReadOnlyArray<DirectiveNode>,
+types?: $ReadOnlyArray<NamedTypeNode>,
|};
export type EnumTypeDefinitionNode = {|
+kind: 'EnumTypeDefinition',
+loc?: Location,
+description?: StringValueNode,
+name: NameNode,
+directives?: $ReadOnlyArray<DirectiveNode>,
+values?: $ReadOnlyArray<EnumValueDefinitionNode>,
|};
export type EnumValueDefinitionNode = {|
+kind: 'EnumValueDefinition',
+loc?: Location,
+description?: StringValueNode,
+name: NameNode,
+directives?: $ReadOnlyArray<DirectiveNode>,
|};
export type InputObjectTypeDefinitionNode = {|
+kind: 'InputObjectTypeDefinition',
+loc?: Location,
+description?: StringValueNode,
+name: NameNode,
+directives?: $ReadOnlyArray<DirectiveNode>,
+fields?: $ReadOnlyArray<InputValueDefinitionNode>,
|};
// Directive Definitions
export type DirectiveDefinitionNode = {|
+kind: 'DirectiveDefinition',
+loc?: Location,
+description?: StringValueNode,
+name: NameNode,
+arguments?: $ReadOnlyArray<InputValueDefinitionNode>,
+repeatable: boolean,
+locations: $ReadOnlyArray<NameNode>,
|};
// Type System Extensions
export type TypeSystemExtensionNode = SchemaExtensionNode | TypeExtensionNode;
export type SchemaExtensionNode = {|
+kind: 'SchemaExtension',
+loc?: Location,
+directives?: $ReadOnlyArray<DirectiveNode>,
+operationTypes?: $ReadOnlyArray<OperationTypeDefinitionNode>,
|};
// Type Extensions
export type TypeExtensionNode =
| ScalarTypeExtensionNode
| ObjectTypeExtensionNode
| InterfaceTypeExtensionNode
| UnionTypeExtensionNode
| EnumTypeExtensionNode
| InputObjectTypeExtensionNode;
export type ScalarTypeExtensionNode = {|
+kind: 'ScalarTypeExtension',
+loc?: Location,
+name: NameNode,
+directives?: $ReadOnlyArray<DirectiveNode>,
|};
export type ObjectTypeExtensionNode = {|
+kind: 'ObjectTypeExtension',
+loc?: Location,
+name: NameNode,
+interfaces?: $ReadOnlyArray<NamedTypeNode>,
+directives?: $ReadOnlyArray<DirectiveNode>,
+fields?: $ReadOnlyArray<FieldDefinitionNode>,
|};
export type InterfaceTypeExtensionNode = {|
+kind: 'InterfaceTypeExtension',
+loc?: Location,
+name: NameNode,
+interfaces?: $ReadOnlyArray<NamedTypeNode>,
+directives?: $ReadOnlyArray<DirectiveNode>,
+fields?: $ReadOnlyArray<FieldDefinitionNode>,
|};
export type UnionTypeExtensionNode = {|
+kind: 'UnionTypeExtension',
+loc?: Location,
+name: NameNode,
+directives?: $ReadOnlyArray<DirectiveNode>,
+types?: $ReadOnlyArray<NamedTypeNode>,
|};
export type EnumTypeExtensionNode = {|
+kind: 'EnumTypeExtension',
+loc?: Location,
+name: NameNode,
+directives?: $ReadOnlyArray<DirectiveNode>,
+values?: $ReadOnlyArray<EnumValueDefinitionNode>,
|};
export type InputObjectTypeExtensionNode = {|
+kind: 'InputObjectTypeExtension',
+loc?: Location,
+name: NameNode,
+directives?: $ReadOnlyArray<DirectiveNode>,
+fields?: $ReadOnlyArray<InputValueDefinitionNode>,
|};

View File

@@ -0,0 +1,118 @@
import defineInspect from "../jsutils/defineInspect.mjs";
/**
* Contains a range of UTF-8 character offsets and token references that
* identify the region of the source from which the AST derived.
*/
export var Location = /*#__PURE__*/function () {
/**
* The character offset at which this Node begins.
*/
/**
* The character offset at which this Node ends.
*/
/**
* The Token at which this Node begins.
*/
/**
* The Token at which this Node ends.
*/
/**
* The Source document the AST represents.
*/
function Location(startToken, endToken, source) {
this.start = startToken.start;
this.end = endToken.end;
this.startToken = startToken;
this.endToken = endToken;
this.source = source;
}
var _proto = Location.prototype;
_proto.toJSON = function toJSON() {
return {
start: this.start,
end: this.end
};
};
return Location;
}(); // Print a simplified form when appearing in `inspect` and `util.inspect`.
defineInspect(Location);
/**
* Represents a range of characters represented by a lexical token
* within a Source.
*/
export var Token = /*#__PURE__*/function () {
/**
* The kind of Token.
*/
/**
* The character offset at which this Node begins.
*/
/**
* The character offset at which this Node ends.
*/
/**
* The 1-indexed line number on which this Token appears.
*/
/**
* The 1-indexed column number at which this Token begins.
*/
/**
* For non-punctuation tokens, represents the interpreted value of the token.
*/
/**
* Tokens exist as nodes in a double-linked-list amongst all tokens
* including ignored tokens. <SOF> is always the first node and <EOF>
* the last.
*/
function Token(kind, start, end, line, column, prev, value) {
this.kind = kind;
this.start = start;
this.end = end;
this.line = line;
this.column = column;
this.value = value;
this.prev = prev;
this.next = null;
}
var _proto2 = Token.prototype;
_proto2.toJSON = function toJSON() {
return {
kind: this.kind,
value: this.value,
line: this.line,
column: this.column
};
};
return Token;
}(); // Print a simplified form when appearing in `inspect` and `util.inspect`.
defineInspect(Token);
/**
* @internal
*/
export function isNode(maybeNode) {
return maybeNode != null && typeof maybeNode.kind === 'string';
}
/**
* The list of all possible AST node types.
*/

View File

@@ -0,0 +1,23 @@
/**
* Produces the value of a block string from its parsed raw value, similar to
* CoffeeScript's block string, Python's docstring trim or Ruby's strip_heredoc.
*
* This implements the GraphQL spec's BlockStringValue() static algorithm.
*/
export function dedentBlockStringValue(rawString: string): string;
/**
* @internal
*/
export function getBlockStringIndentation(body: string): number;
/**
* Print a block string in the indented block form by adding a leading and
* trailing blank line. However, if a block string starts with whitespace and is
* a single-line, adding a leading blank line would strip that whitespace.
*/
export function printBlockString(
value: string,
indentation?: string,
preferMultipleLines?: boolean,
): string;
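A minimal usage sketch, assuming the functions above are importable as './blockString' (the path is an assumption); the sample input follows the block-string example used in the GraphQL spec.

import { dedentBlockStringValue, printBlockString } from './blockString'; // path assumed

// Raw value as a lexer would capture it: leading newline plus common indentation.
const raw = '\n    Hello,\n      World!\n\n    Yours,\n      GraphQL.\n';

const value = dedentBlockStringValue(raw);
// -> 'Hello,\n  World!\n\nYours,\n  GraphQL.'

const printed = printBlockString(value, '  ');
// -> a `"""`-delimited block whose interior lines are prefixed with two spaces.

console.log(value);
console.log(printed);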

View File

@@ -0,0 +1,134 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.dedentBlockStringValue = dedentBlockStringValue;
exports.getBlockStringIndentation = getBlockStringIndentation;
exports.printBlockString = printBlockString;
/**
* Produces the value of a block string from its parsed raw value, similar to
* CoffeeScript's block string, Python's docstring trim or Ruby's strip_heredoc.
*
* This implements the GraphQL spec's BlockStringValue() static algorithm.
*
* @internal
*/
function dedentBlockStringValue(rawString) {
// Expand a block string's raw value into independent lines.
var lines = rawString.split(/\r\n|[\n\r]/g); // Remove common indentation from all lines but first.
var commonIndent = getBlockStringIndentation(rawString);
if (commonIndent !== 0) {
for (var i = 1; i < lines.length; i++) {
lines[i] = lines[i].slice(commonIndent);
}
} // Remove leading and trailing blank lines.
var startLine = 0;
while (startLine < lines.length && isBlank(lines[startLine])) {
++startLine;
}
var endLine = lines.length;
while (endLine > startLine && isBlank(lines[endLine - 1])) {
--endLine;
} // Return a string of the lines joined with U+000A.
return lines.slice(startLine, endLine).join('\n');
}
function isBlank(str) {
for (var i = 0; i < str.length; ++i) {
if (str[i] !== ' ' && str[i] !== '\t') {
return false;
}
}
return true;
}
/**
* @internal
*/
function getBlockStringIndentation(value) {
var _commonIndent;
var isFirstLine = true;
var isEmptyLine = true;
var indent = 0;
var commonIndent = null;
for (var i = 0; i < value.length; ++i) {
switch (value.charCodeAt(i)) {
case 13:
// \r
if (value.charCodeAt(i + 1) === 10) {
++i; // skip \r\n as one symbol
}
// falls through
case 10:
// \n
isFirstLine = false;
isEmptyLine = true;
indent = 0;
break;
case 9: // \t
case 32:
// <space>
++indent;
break;
default:
if (isEmptyLine && !isFirstLine && (commonIndent === null || indent < commonIndent)) {
commonIndent = indent;
}
isEmptyLine = false;
}
}
return (_commonIndent = commonIndent) !== null && _commonIndent !== void 0 ? _commonIndent : 0;
}
/**
* Print a block string in the indented block form by adding a leading and
* trailing blank line. However, if a block string starts with whitespace and is
* a single-line, adding a leading blank line would strip that whitespace.
*
* @internal
*/
function printBlockString(value) {
var indentation = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : '';
var preferMultipleLines = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false;
var isSingleLine = value.indexOf('\n') === -1;
var hasLeadingSpace = value[0] === ' ' || value[0] === '\t';
var hasTrailingQuote = value[value.length - 1] === '"';
var hasTrailingSlash = value[value.length - 1] === '\\';
var printAsMultipleLines = !isSingleLine || hasTrailingQuote || hasTrailingSlash || preferMultipleLines;
var result = ''; // Format a multi-line block quote to account for leading space.
if (printAsMultipleLines && !(isSingleLine && hasLeadingSpace)) {
result += '\n' + indentation;
}
result += indentation ? value.replace(/\n/g, '\n' + indentation) : value;
if (printAsMultipleLines) {
result += '\n';
}
return '"""' + result.replace(/"""/g, '\\"""') + '"""';
}

View File

@@ -0,0 +1,121 @@
// @flow strict
/**
* Produces the value of a block string from its parsed raw value, similar to
* CoffeeScript's block string, Python's docstring trim or Ruby's strip_heredoc.
*
* This implements the GraphQL spec's BlockStringValue() static algorithm.
*
* @internal
*/
export function dedentBlockStringValue(rawString: string): string {
// Expand a block string's raw value into independent lines.
const lines = rawString.split(/\r\n|[\n\r]/g);
// Remove common indentation from all lines but first.
const commonIndent = getBlockStringIndentation(rawString);
if (commonIndent !== 0) {
for (let i = 1; i < lines.length; i++) {
lines[i] = lines[i].slice(commonIndent);
}
}
// Remove leading and trailing blank lines.
let startLine = 0;
while (startLine < lines.length && isBlank(lines[startLine])) {
++startLine;
}
let endLine = lines.length;
while (endLine > startLine && isBlank(lines[endLine - 1])) {
--endLine;
}
// Return a string of the lines joined with U+000A.
return lines.slice(startLine, endLine).join('\n');
}
function isBlank(str: string): boolean {
for (let i = 0; i < str.length; ++i) {
if (str[i] !== ' ' && str[i] !== '\t') {
return false;
}
}
return true;
}
/**
* @internal
*/
export function getBlockStringIndentation(value: string): number {
let isFirstLine = true;
let isEmptyLine = true;
let indent = 0;
let commonIndent = null;
for (let i = 0; i < value.length; ++i) {
switch (value.charCodeAt(i)) {
case 13: // \r
if (value.charCodeAt(i + 1) === 10) {
++i; // skip \r\n as one symbol
}
// falls through
case 10: // \n
isFirstLine = false;
isEmptyLine = true;
indent = 0;
break;
case 9: // \t
case 32: // <space>
++indent;
break;
default:
if (
isEmptyLine &&
!isFirstLine &&
(commonIndent === null || indent < commonIndent)
) {
commonIndent = indent;
}
isEmptyLine = false;
}
}
return commonIndent ?? 0;
}
/**
* Print a block string in the indented block form by adding a leading and
* trailing blank line. However, if a block string starts with whitespace and is
* a single-line, adding a leading blank line would strip that whitespace.
*
* @internal
*/
export function printBlockString(
value: string,
indentation: string = '',
preferMultipleLines: boolean = false,
): string {
const isSingleLine = value.indexOf('\n') === -1;
const hasLeadingSpace = value[0] === ' ' || value[0] === '\t';
const hasTrailingQuote = value[value.length - 1] === '"';
const hasTrailingSlash = value[value.length - 1] === '\\';
const printAsMultipleLines =
!isSingleLine ||
hasTrailingQuote ||
hasTrailingSlash ||
preferMultipleLines;
let result = '';
// Format a multi-line block quote to account for leading space.
if (printAsMultipleLines && !(isSingleLine && hasLeadingSpace)) {
result += '\n' + indentation;
}
result += indentation ? value.replace(/\n/g, '\n' + indentation) : value;
if (printAsMultipleLines) {
result += '\n';
}
return '"""' + result.replace(/"""/g, '\\"""') + '"""';
}

View File

@@ -0,0 +1,124 @@
/**
* Produces the value of a block string from its parsed raw value, similar to
* CoffeeScript's block string, Python's docstring trim or Ruby's strip_heredoc.
*
* This implements the GraphQL spec's BlockStringValue() static algorithm.
*
* @internal
*/
export function dedentBlockStringValue(rawString) {
// Expand a block string's raw value into independent lines.
var lines = rawString.split(/\r\n|[\n\r]/g); // Remove common indentation from all lines but first.
var commonIndent = getBlockStringIndentation(rawString);
if (commonIndent !== 0) {
for (var i = 1; i < lines.length; i++) {
lines[i] = lines[i].slice(commonIndent);
}
} // Remove leading and trailing blank lines.
var startLine = 0;
while (startLine < lines.length && isBlank(lines[startLine])) {
++startLine;
}
var endLine = lines.length;
while (endLine > startLine && isBlank(lines[endLine - 1])) {
--endLine;
} // Return a string of the lines joined with U+000A.
return lines.slice(startLine, endLine).join('\n');
}
function isBlank(str) {
for (var i = 0; i < str.length; ++i) {
if (str[i] !== ' ' && str[i] !== '\t') {
return false;
}
}
return true;
}
/**
* @internal
*/
export function getBlockStringIndentation(value) {
var _commonIndent;
var isFirstLine = true;
var isEmptyLine = true;
var indent = 0;
var commonIndent = null;
for (var i = 0; i < value.length; ++i) {
switch (value.charCodeAt(i)) {
case 13:
// \r
if (value.charCodeAt(i + 1) === 10) {
++i; // skip \r\n as one symbol
}
// falls through
case 10:
// \n
isFirstLine = false;
isEmptyLine = true;
indent = 0;
break;
case 9: // \t
case 32:
// <space>
++indent;
break;
default:
if (isEmptyLine && !isFirstLine && (commonIndent === null || indent < commonIndent)) {
commonIndent = indent;
}
isEmptyLine = false;
}
}
return (_commonIndent = commonIndent) !== null && _commonIndent !== void 0 ? _commonIndent : 0;
}
/**
* Print a block string in the indented block form by adding a leading and
* trailing blank line. However, if a block string starts with whitespace and is
* a single-line, adding a leading blank line would strip that whitespace.
*
* @internal
*/
export function printBlockString(value) {
var indentation = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : '';
var preferMultipleLines = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false;
var isSingleLine = value.indexOf('\n') === -1;
var hasLeadingSpace = value[0] === ' ' || value[0] === '\t';
var hasTrailingQuote = value[value.length - 1] === '"';
var hasTrailingSlash = value[value.length - 1] === '\\';
var printAsMultipleLines = !isSingleLine || hasTrailingQuote || hasTrailingSlash || preferMultipleLines;
var result = ''; // Format a multi-line block quote to account for leading space.
if (printAsMultipleLines && !(isSingleLine && hasLeadingSpace)) {
result += '\n' + indentation;
}
result += indentation ? value.replace(/\n/g, '\n' + indentation) : value;
if (printAsMultipleLines) {
result += '\n';
}
return '"""' + result.replace(/"""/g, '\\"""') + '"""';
}

View File

@@ -0,0 +1,32 @@
/**
* The set of allowed directive location values.
*/
export const DirectiveLocation: {
// Request Definitions
QUERY: 'QUERY';
MUTATION: 'MUTATION';
SUBSCRIPTION: 'SUBSCRIPTION';
FIELD: 'FIELD';
FRAGMENT_DEFINITION: 'FRAGMENT_DEFINITION';
FRAGMENT_SPREAD: 'FRAGMENT_SPREAD';
INLINE_FRAGMENT: 'INLINE_FRAGMENT';
VARIABLE_DEFINITION: 'VARIABLE_DEFINITION';
// Type System Definitions
SCHEMA: 'SCHEMA';
SCALAR: 'SCALAR';
OBJECT: 'OBJECT';
FIELD_DEFINITION: 'FIELD_DEFINITION';
ARGUMENT_DEFINITION: 'ARGUMENT_DEFINITION';
INTERFACE: 'INTERFACE';
UNION: 'UNION';
ENUM: 'ENUM';
ENUM_VALUE: 'ENUM_VALUE';
INPUT_OBJECT: 'INPUT_OBJECT';
INPUT_FIELD_DEFINITION: 'INPUT_FIELD_DEFINITION';
};
/**
* The enum type representing the directive location values.
*/
export type DirectiveLocationEnum = typeof DirectiveLocation[keyof typeof DirectiveLocation];
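A minimal usage sketch, assuming the exports above are importable as './directiveLocation' (the path is an assumption): the frozen map doubles as a runtime check for location names.

import { DirectiveLocation } from './directiveLocation'; // path assumed
import type { DirectiveLocationEnum } from './directiveLocation';

// Narrow a plain string (e.g. read from an SDL directive definition) to the enum.
function isDirectiveLocation(value: string): value is DirectiveLocationEnum {
  return (Object.values(DirectiveLocation) as Array<string>).includes(value);
}

console.log(isDirectiveLocation('FIELD_DEFINITION')); // true
console.log(isDirectiveLocation('EVERYWHERE'));       // false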

View File

@@ -0,0 +1,38 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.DirectiveLocation = void 0;
/**
* The set of allowed directive location values.
*/
var DirectiveLocation = Object.freeze({
// Request Definitions
QUERY: 'QUERY',
MUTATION: 'MUTATION',
SUBSCRIPTION: 'SUBSCRIPTION',
FIELD: 'FIELD',
FRAGMENT_DEFINITION: 'FRAGMENT_DEFINITION',
FRAGMENT_SPREAD: 'FRAGMENT_SPREAD',
INLINE_FRAGMENT: 'INLINE_FRAGMENT',
VARIABLE_DEFINITION: 'VARIABLE_DEFINITION',
// Type System Definitions
SCHEMA: 'SCHEMA',
SCALAR: 'SCALAR',
OBJECT: 'OBJECT',
FIELD_DEFINITION: 'FIELD_DEFINITION',
ARGUMENT_DEFINITION: 'ARGUMENT_DEFINITION',
INTERFACE: 'INTERFACE',
UNION: 'UNION',
ENUM: 'ENUM',
ENUM_VALUE: 'ENUM_VALUE',
INPUT_OBJECT: 'INPUT_OBJECT',
INPUT_FIELD_DEFINITION: 'INPUT_FIELD_DEFINITION'
});
/**
* The enum type representing the directive location values.
*/
exports.DirectiveLocation = DirectiveLocation;

View File

@@ -0,0 +1,32 @@
// @flow strict
/**
* The set of allowed directive location values.
*/
export const DirectiveLocation = Object.freeze({
// Request Definitions
QUERY: 'QUERY',
MUTATION: 'MUTATION',
SUBSCRIPTION: 'SUBSCRIPTION',
FIELD: 'FIELD',
FRAGMENT_DEFINITION: 'FRAGMENT_DEFINITION',
FRAGMENT_SPREAD: 'FRAGMENT_SPREAD',
INLINE_FRAGMENT: 'INLINE_FRAGMENT',
VARIABLE_DEFINITION: 'VARIABLE_DEFINITION',
// Type System Definitions
SCHEMA: 'SCHEMA',
SCALAR: 'SCALAR',
OBJECT: 'OBJECT',
FIELD_DEFINITION: 'FIELD_DEFINITION',
ARGUMENT_DEFINITION: 'ARGUMENT_DEFINITION',
INTERFACE: 'INTERFACE',
UNION: 'UNION',
ENUM: 'ENUM',
ENUM_VALUE: 'ENUM_VALUE',
INPUT_OBJECT: 'INPUT_OBJECT',
INPUT_FIELD_DEFINITION: 'INPUT_FIELD_DEFINITION',
});
/**
* The enum type representing the directive location values.
*/
export type DirectiveLocationEnum = $Values<typeof DirectiveLocation>;

View File

@@ -0,0 +1,29 @@
/**
* The set of allowed directive location values.
*/
export var DirectiveLocation = Object.freeze({
// Request Definitions
QUERY: 'QUERY',
MUTATION: 'MUTATION',
SUBSCRIPTION: 'SUBSCRIPTION',
FIELD: 'FIELD',
FRAGMENT_DEFINITION: 'FRAGMENT_DEFINITION',
FRAGMENT_SPREAD: 'FRAGMENT_SPREAD',
INLINE_FRAGMENT: 'INLINE_FRAGMENT',
VARIABLE_DEFINITION: 'VARIABLE_DEFINITION',
// Type System Definitions
SCHEMA: 'SCHEMA',
SCALAR: 'SCALAR',
OBJECT: 'OBJECT',
FIELD_DEFINITION: 'FIELD_DEFINITION',
ARGUMENT_DEFINITION: 'ARGUMENT_DEFINITION',
INTERFACE: 'INTERFACE',
UNION: 'UNION',
ENUM: 'ENUM',
ENUM_VALUE: 'ENUM_VALUE',
INPUT_OBJECT: 'INPUT_OBJECT',
INPUT_FIELD_DEFINITION: 'INPUT_FIELD_DEFINITION'
});
/**
* The enum type representing the directive location values.
*/

File diff suppressed because it is too large

View File

@@ -0,0 +1,987 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var grammar = {
Name: {
token: 'Name'
},
String: {
token: 'String'
},
BlockString: {
token: 'BlockString'
},
Document: {
listOfType: 'Definition'
},
Definition: {
peek: [{
ifCondition: {
token: 'Name',
oneOf: ['query', 'mutation', 'subscription']
},
expect: 'OperationDefinition'
}, {
ifCondition: {
token: 'Name',
ofValue: 'fragment'
},
expect: 'FragmentDefinition'
}, {
ifCondition: {
token: 'Name',
oneOf: ['schema', 'scalar', 'type', 'interface', 'union', 'enum', 'input', 'directive']
},
expect: 'TypeSystemDefinition'
}, {
ifCondition: {
token: 'Name',
ofValue: 'extend'
},
expect: 'TypeSystemExtension'
}, {
ifCondition: {
token: '{'
},
expect: 'OperationDefinition'
}, {
ifCondition: 'String',
expect: 'TypeSystemDefinition'
}, {
ifCondition: 'BlockString',
expect: 'TypeSystemDefinition'
}]
},
OperationDefinition: {
peek: [{
ifCondition: {
token: '{'
},
expect: 'SelectionSet'
}, {
ifCondition: {
token: 'Name',
oneOf: ['query', 'mutation', 'subscription']
},
expect: ['OperationType', {
token: 'Name',
optional: true,
tokenName: 'OperationName',
definitionName: true
}, {
ofType: 'VariableDefinitions',
optional: true
}, {
ofType: 'Directives',
optional: true
}, 'SelectionSet']
}]
},
OperationType: {
ofType: 'OperationTypeName'
},
OperationTypeName: {
token: 'Name',
oneOf: ['query', 'mutation', 'subscription'],
definitionName: true
},
SelectionSet: [{
token: '{'
}, {
listOfType: 'Selection'
}, {
token: '}'
}],
Selection: {
peek: [{
ifCondition: {
token: '...'
},
expect: 'Fragment'
}, {
ifCondition: {
token: 'Name'
},
expect: 'Field'
}]
},
Field: [{
ofType: 'Alias',
optional: true,
eatNextOnFail: true,
definitionName: true
}, {
token: 'Name',
tokenName: 'FieldName',
definitionName: true
}, {
ofType: 'Arguments',
optional: true
}, {
ofType: 'Directives',
optional: true
}, {
ofType: 'SelectionSet',
optional: true
}],
Arguments: [{
token: '('
}, {
listOfType: 'Argument'
}, {
token: ')'
}],
Argument: [{
token: 'Name',
tokenName: 'ArgumentName',
definitionName: true
}, {
token: ':'
}, 'Value'],
Alias: [{
token: 'Name',
tokenName: 'AliasName',
definitionName: true
}, {
token: ':'
}],
Fragment: [{
token: '...'
}, {
peek: [{
ifCondition: 'FragmentName',
expect: 'FragmentSpread'
}, {
ifCondition: {
token: 'Name',
ofValue: 'on'
},
expect: 'InlineFragment'
}, {
ifCondition: {
token: '@'
},
expect: 'InlineFragment'
}, {
ifCondition: {
token: '{'
},
expect: 'InlineFragment'
}]
}],
FragmentSpread: ['FragmentName', {
ofType: 'Directives',
optional: true
}],
FragmentDefinition: [{
token: 'Name',
ofValue: 'fragment',
tokenName: 'FragmentDefinitionKeyword'
}, 'FragmentName', 'TypeCondition', {
ofType: 'Directives',
optional: true
}, 'SelectionSet'],
FragmentName: {
token: 'Name',
butNot: {
token: 'Name',
ofValue: 'on'
},
definitionName: true
},
TypeCondition: [{
token: 'Name',
ofValue: 'on',
tokenName: 'OnKeyword'
}, 'TypeName'],
InlineFragment: [{
ofType: 'TypeCondition',
optional: true
}, {
ofType: 'Directives',
optional: true
}, 'SelectionSet'],
Value: {
peek: [{
ifCondition: {
token: '$'
},
expect: 'Variable'
}, {
ifCondition: 'IntValue',
expect: {
ofType: 'IntValue',
tokenName: 'NumberValue'
}
}, {
ifCondition: 'FloatValue',
expect: {
ofType: 'FloatValue',
tokenName: 'NumberValue'
}
}, {
ifCondition: 'BooleanValue',
expect: {
ofType: 'BooleanValue',
tokenName: 'BooleanValue'
}
}, {
ifCondition: 'EnumValue',
expect: {
ofType: 'EnumValue',
tokenName: 'EnumValue'
}
}, {
ifCondition: 'String',
expect: {
ofType: 'String',
tokenName: 'StringValue'
}
}, {
ifCondition: 'BlockString',
expect: {
ofType: 'BlockString',
tokenName: 'StringValue'
}
}, {
ifCondition: 'NullValue',
expect: {
ofType: 'NullValue',
tokenName: 'NullValue'
}
}, {
ifCondition: {
token: '['
},
expect: 'ListValue'
}, {
ifCondition: {
token: '{'
},
expect: 'ObjectValue'
}]
},
ConstValue: {
peek: [{
ifCondition: 'IntValue',
expect: {
ofType: 'IntValue'
}
}, {
ifCondition: 'FloatValue',
expect: {
ofType: 'FloatValue'
}
}, {
ifCondition: 'BooleanValue',
expect: 'BooleanValue'
}, {
ifCondition: 'EnumValue',
expect: 'EnumValue'
}, {
ifCondition: 'String',
expect: {
ofType: 'String',
tokenName: 'StringValue'
}
}, {
ifCondition: 'BlockString',
expect: {
token: 'BlockString',
tokenName: 'StringValue'
}
}, {
ifCondition: 'NullValue',
expect: 'NullValue'
}, {
ifCondition: {
token: '['
},
expect: 'ConstListValue'
}, {
ifCondition: {
token: '{'
},
expect: 'ObjectValue'
}]
},
IntValue: {
token: 'Int'
},
FloatValue: {
token: 'Float'
},
StringValue: {
peek: [{
ifCondition: {
token: 'String'
},
expect: {
token: 'String',
tokenName: 'StringValue'
}
}, {
ifCondition: {
token: 'BlockString'
},
expect: {
token: 'BlockString',
tokenName: 'StringValue'
}
}]
},
BooleanValue: {
token: 'Name',
oneOf: ['true', 'false'],
tokenName: 'BooleanValue'
},
NullValue: {
token: 'Name',
ofValue: 'null',
tokenName: 'NullValue'
},
EnumValue: {
token: 'Name',
butNot: {
token: 'Name',
oneOf: ['null', 'true', 'false']
},
tokenName: 'EnumValue'
},
ListValue: [{
token: '['
}, {
listOfType: 'Value',
optional: true
}, {
token: ']'
}],
ConstListValue: [{
token: '['
}, {
listOfType: 'ConstValue',
optional: true
}, {
token: ']'
}],
ObjectValue: [{
token: '{'
}, {
listOfType: 'ObjectField',
optional: true
}, {
token: '}'
}],
ObjectField: [{
token: 'Name',
tokenName: 'ObjectFieldName'
}, {
token: ':'
}, {
ofType: 'ConstValue'
}],
Variable: [{
token: '$',
tokenName: 'VariableName'
}, {
token: 'Name',
tokenName: 'VariableName'
}],
VariableDefinitions: [{
token: '('
}, {
listOfType: 'VariableDefinition'
}, {
token: ')'
}],
VariableDefinition: ['Variable', {
token: ':'
}, 'Type', {
ofType: 'DefaultValue',
optional: true
}],
DefaultValue: [{
token: '='
}, 'ConstValue'],
TypeName: {
token: 'Name',
tokenName: 'TypeName',
typeName: true
},
Type: {
peek: [{
ifCondition: {
token: 'Name'
},
expect: ['TypeName', {
token: '!',
optional: true
}]
}, {
ifCondition: {
token: '['
},
expect: 'ListType'
}]
},
ListType: [{
token: '['
}, {
listOfType: 'Type'
}, {
token: ']'
}, {
token: '!',
optional: true
}],
Directives: {
listOfType: 'Directive'
},
Directive: [{
token: '@',
tokenName: 'DirectiveName'
}, {
token: 'Name',
tokenName: 'DirectiveName'
}, {
ofType: 'Arguments',
optional: true
}],
TypeSystemDefinition: [{
ofType: 'Description',
optional: true
}, {
peek: [{
ifCondition: {
target: 'Name',
ofValue: 'schema'
},
expect: 'SchemaDefinition'
}, {
ifCondition: {
target: 'Name',
ofValue: 'scalar'
},
expect: 'ScalarTypeDefinition'
}, {
ifCondition: {
target: 'Name',
ofValue: 'type'
},
expect: 'ObjectTypeDefinition'
}, {
ifCondition: {
target: 'Name',
ofValue: 'interface'
},
expect: 'InterfaceTypeDefinition'
}, {
ifCondition: {
target: 'Name',
ofValue: 'union'
},
expect: 'UnionTypeDefinition'
}, {
ifCondition: {
target: 'Name',
ofValue: 'enum'
},
expect: 'EnumTypeDefinition'
}, {
ifCondition: {
target: 'Name',
ofValue: 'input'
},
expect: 'InputObjectTypeDefinition'
}, {
ifCondition: {
target: 'Name',
ofValue: 'directive'
},
expect: 'DirectiveDefinition'
}]
}],
TypeSystemExtension: {
peek: [{
ifCondition: {
target: 'Name',
ofValue: 'schema'
},
expect: 'SchemaExtension'
}, {
ifCondition: {
target: 'Name',
ofValue: 'scalar'
},
expect: 'ScalarTypeExtension'
}, {
ifCondition: {
target: 'Name',
ofValue: 'type'
},
expect: 'ObjectTypeExtension'
}, {
ifCondition: {
target: 'Name',
ofValue: 'interface'
},
expect: 'InterfaceTypeExtension'
}, {
ifCondition: {
target: 'Name',
ofValue: 'union'
},
expect: 'UnionTypeExtension'
}, {
ifCondition: {
target: 'Name',
ofValue: 'enum'
},
expect: 'EnumTypeExtension'
}, {
ifCondition: {
target: 'Name',
ofValue: 'input'
},
expect: 'InputObjectTypeExtension'
}]
},
SchemaDefinition: [{
token: 'Name',
ofValue: 'schema',
tokenName: 'SchemaDefinitionKeyword'
}, {
ofType: 'Directives',
optional: true
}, {
token: '{'
}, {
listOfType: 'RootOperationTypeDefinition'
}, {
token: '}'
}],
RootOperationTypeDefinition: ['OperationType', {
token: ':'
}, {
token: 'Name',
tokenName: 'OperationTypeDefinitionName'
}],
SchemaExtension: [{
token: 'Name',
ofValue: 'extend'
}, {
token: 'Name',
ofValue: 'schema'
}, 'Name', {
peek: [{
ifCondition: {
token: '@'
},
expect: ['Directives', {
ofType: [{
token: '{'
}, {
listOfType: 'RootOperationTypeDefinition'
}, {
token: '}'
}],
optional: true
}]
}, {
ifCondition: {
token: '{'
},
expect: [{
token: '{'
}, {
listOfType: 'RootOperationTypeDefinition'
}, {
token: '}'
}]
}]
}],
Description: 'StringValue',
ScalarTypeDefinition: [{
ofType: 'Description',
optional: true
}, {
token: 'Name',
ofValue: 'scalar',
tokenName: 'ScalarDefinitionKeyword'
}, 'TypeName', {
ofType: 'Directives',
optional: true
}],
ScalarTypeExtension: [{
token: 'Name',
ofValue: 'extend',
tokenName: 'ExtendDefinitionKeyword'
}, {
token: 'Name',
ofValue: 'scalar',
tokenName: 'ScalarDefinitionKeyword'
}, 'TypeName', 'Directives'],
ObjectTypeDefinition: [{
ofType: 'Description',
optional: true
}, {
token: 'Name',
ofValue: 'type',
tokenName: 'TypeDefinitionKeyword'
}, 'TypeName', {
ofType: 'ImplementsInterfaces',
optional: true
}, {
ofType: 'Directives',
optional: true
}, {
ofType: 'FieldsDefinition',
optional: true
}],
ImplementsInterfaces: [{
token: 'Name',
ofValue: 'implements',
tokenName: 'ImplementsKeyword'
}, {
token: '&',
optional: true
}, 'TypeName', {
listOfType: 'ImplementsAdditionalInterfaceName',
optional: true
}],
ImplementsAdditionalInterfaceName: [{
token: '&'
}, 'TypeName'],
FieldsDefinition: [{
token: '{'
}, {
listOfType: 'FieldDefinition'
}, {
token: '}'
}],
FieldDefinition: [{
ofType: 'Description',
optional: true
}, {
token: 'Name',
tokenName: 'AliasName',
definitionName: true
}, {
ofType: 'ArgumentsDefinition',
optional: true
}, {
token: ':'
}, 'Type', {
ofType: 'Directives',
optional: true
}],
ArgumentsDefinition: [{
token: '('
}, {
listOfType: 'InputValueDefinition'
}, {
token: ')'
}],
InputValueDefinition: [{
ofType: 'Description',
optional: true
}, {
token: 'Name',
tokenName: 'ArgumentName'
}, {
token: ':'
}, 'Type', {
ofType: 'DefaultValue',
optional: true
}, {
ofType: 'Directives',
optional: true
}],
ObjectTypeExtension: [{
token: 'Name',
ofValue: 'extend',
tokenName: 'ExtendDefinitionKeyword'
}, {
token: 'Name',
ofValue: 'type',
tokenName: 'TypeDefinitionKeyword'
}, 'TypeName', {
peek: [{
ifCondition: {
token: 'Name',
ofValue: 'interface'
},
expect: ['ImplementsInterfaces', {
peek: [{
ifCondition: {
token: '@'
},
expect: ['Directives', {
ofType: 'FieldsDefinition',
optional: true
}]
}, {
ifCondition: {
token: '{'
},
expect: 'FieldsDefinition'
}],
optional: true
}]
}, {
ifCondition: {
token: '@'
},
expect: ['Directives', {
ofType: 'FieldsDefinition',
optional: true
}]
}, {
ifCondition: {
token: '{'
},
expect: 'FieldsDefinition'
}]
}],
InterfaceTypeDefinition: [{
ofType: 'Description',
optional: true
}, {
token: 'Name',
ofValue: 'interface',
tokenName: 'InterfaceDefinitionKeyword'
}, 'TypeName', {
ofType: 'Directives',
optional: true
}, {
ofType: 'FieldsDefinition',
optional: true
}],
InterfaceTypeExtension: [{
token: 'Name',
ofValue: 'extend',
tokenName: 'ExtendDefinitionKeyword'
}, {
token: 'Name',
ofValue: 'interface',
tokenName: 'InterfaceDefinitionKeyword'
}, 'TypeName', {
peek: [{
ifCondition: {
token: '@'
},
expect: ['Directives', {
ofType: 'FieldsDefinition',
optional: true
}]
}, {
ifCondition: {
token: '{'
},
expect: 'FieldsDefinition'
}]
}],
UnionTypeDefinition: [{
ofType: 'Description',
optional: true
}, {
token: 'Name',
ofValue: 'union',
tokenName: 'UnionDefinitionKeyword'
}, 'TypeName', {
ofType: 'Directives',
optional: true
}, {
ofType: 'UnionMemberTypes',
optional: true
}],
UnionMemberTypes: [{
token: '='
}, {
token: '|',
optional: true
}, 'Name', {
listOfType: 'UnionMemberAdditionalTypeName',
optional: true
}],
UnionMemberAdditionalTypeName: [{
token: '|'
}, 'TypeName'],
UnionTypeExtension: [{
token: 'Name',
ofValue: 'extend',
tokenName: 'ExtendDefinitionKeyword'
}, {
token: 'Name',
ofValue: 'union',
tokenName: 'UnionDefinitionKeyword'
}, 'TypeName', {
peek: [{
ifCondition: {
token: '@'
},
expect: ['Directives', {
ofType: 'UnionMemberTypes',
optional: true
}]
}, {
ifCondition: {
token: '='
},
expect: 'UnionMemberTypes'
}]
}],
EnumTypeDefinition: [{
ofType: 'Description',
optional: true
}, {
token: 'Name',
ofValue: 'enum',
tokenName: 'EnumDefinitionKeyword'
}, 'TypeName', {
ofType: 'Directives',
optional: true
}, {
ofType: 'EnumValuesDefinition',
optional: true
}],
EnumValuesDefinition: [{
token: '{'
}, {
listOfType: 'EnumValueDefinition'
}, {
token: '}'
}],
EnumValueDefinition: [{
ofType: 'Description',
optional: true
}, 'EnumValue', {
ofType: 'Directives',
optional: true
}],
EnumTypeExtension: [{
token: 'Name',
ofValue: 'extend',
tokenName: 'ExtendDefinitionKeyword'
}, {
token: 'Name',
ofValue: 'enum',
tokenName: 'EnumDefinitionKeyword'
}, 'TypeName', {
peek: [{
ifCondition: {
token: '@'
},
expect: ['Directives', {
ofType: 'EnumValuesDefinition',
optional: true
}]
}, {
ifCondition: {
token: '{'
},
expect: 'EnumValuesDefinition'
}]
}],
InputObjectTypeDefinition: [{
ofType: 'Description',
optional: true
}, {
token: 'Name',
ofValue: 'input',
tokenName: 'InputDefinitionKeyword'
}, 'TypeName', {
ofType: 'Directives',
optional: true
}, {
ofType: 'InputFieldsDefinition',
optional: true
}],
InputFieldsDefinition: [{
token: '{'
}, {
listOfType: 'InputValueDefinition'
}, {
token: '}'
}],
InputObjectTypeExtension: [{
token: 'Name',
ofValue: 'extend',
tokenName: 'ExtendDefinitionKeyword'
}, {
token: 'Name',
ofValue: 'input',
tokenName: 'InputDefinitionKeyword'
}, 'TypeName', {
peek: [{
ifCondition: {
token: '@'
},
expect: ['Directives', {
ofType: 'InputFieldsDefinition',
optional: true
}]
}, {
ifCondition: {
token: '{'
},
expect: 'InputFieldsDefinition'
}]
}],
DirectiveDefinition: [{
ofType: 'Description',
optional: true
}, {
token: 'Name',
ofValue: 'directive',
tokenName: 'DirectiveDefinitionKeyword'
}, {
token: '@',
tokenName: 'DirectiveName'
}, {
token: 'Name',
tokenName: 'DirectiveName'
}, {
ofType: 'ArgumentsDefinition',
optional: true
}, {
token: 'Name',
ofValue: 'on',
tokenName: 'OnKeyword'
}, 'DirectiveLocations'],
DirectiveLocations: [{
token: '|',
optional: true
}, 'DirectiveLocation', {
listOfType: 'DirectiveLocationAdditionalName',
optional: true
}],
DirectiveLocationAdditionalName: [{
token: '|'
}, 'DirectiveLocation'],
DirectiveLocation: {
peek: [{
ifCondition: 'ExecutableDirectiveLocation',
expect: 'ExecutableDirectiveLocation'
}, {
ifCondition: 'TypeSystemDirectiveLocation',
expect: 'TypeSystemDirectiveLocation'
}]
},
ExecutableDirectiveLocation: {
token: 'Name',
oneOf: ['QUERY', 'MUTATION', 'SUBSCRIPTION', 'FIELD', 'FRAGMENT_DEFINITION', 'FRAGMENT_SPREAD', 'INLINE_FRAGMENT'],
tokenName: 'EnumValue'
},
TypeSystemDirectiveLocation: {
token: 'Name',
oneOf: ['SCHEMA', 'SCALAR', 'OBJECT', 'FIELD_DEFINITION', 'ARGUMENT_DEFINITION', 'INTERFACE', 'UNION', 'ENUM', 'ENUM_VALUE', 'INPUT_OBJECT', 'INPUT_FIELD_DEFINITION'],
tokenName: 'EnumValue'
} // FIXME: enforce proper typing
};
var _default = grammar;
exports.default = _default;
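
The compiled grammar above is plain data: each entry is either a rule-name string, a single constraint object, or an ordered array of constraints. The sketch below is illustrative and not part of this commit; it assumes it is run next to the compiled grammar.js, and `constraintKind` is a hypothetical helper that mirrors the classification the OnlineParser performs later. It classifies the constraints of the ListType rule to show how the constraint shapes line up with the `[Type]!` syntax.

// Illustrative sketch, not part of the library. Assumes the compiled grammar.js
// from this commit sits next to this file.
const grammar = require('./grammar.js').default;

function constraintKind(rule) {
  if (typeof rule === 'string') return 'RuleName';         // reference to another rule, e.g. 'TypeName'
  if (Array.isArray(rule)) return 'ConstraintsSet';         // ordered sequence of constraints
  if ('token' in rule) return 'TokenConstraint';            // match a single lexer token
  if ('ofType' in rule) return 'OfTypeConstraint';          // expand another rule once
  if ('listOfType' in rule) return 'ListOfTypeConstraint';  // expand another rule one or more times
  if ('peek' in rule) return 'PeekConstraint';              // branch on a look-ahead token
  return 'Invalid';
}

// ListType is: '[' token, a list of Type, ']' token, then an optional '!' token.
console.log(grammar.ListType.map(constraintKind));
// -> [ 'TokenConstraint', 'ListOfTypeConstraint', 'TokenConstraint', 'TokenConstraint' ]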

File diff suppressed because it is too large

View File

@@ -0,0 +1,980 @@
var grammar = {
Name: {
token: 'Name'
},
String: {
token: 'String'
},
BlockString: {
token: 'BlockString'
},
Document: {
listOfType: 'Definition'
},
Definition: {
peek: [{
ifCondition: {
token: 'Name',
oneOf: ['query', 'mutation', 'subscription']
},
expect: 'OperationDefinition'
}, {
ifCondition: {
token: 'Name',
ofValue: 'fragment'
},
expect: 'FragmentDefinition'
}, {
ifCondition: {
token: 'Name',
oneOf: ['schema', 'scalar', 'type', 'interface', 'union', 'enum', 'input', 'directive']
},
expect: 'TypeSystemDefinition'
}, {
ifCondition: {
token: 'Name',
ofValue: 'extend'
},
expect: 'TypeSystemExtension'
}, {
ifCondition: {
token: '{'
},
expect: 'OperationDefinition'
}, {
ifCondition: 'String',
expect: 'TypeSystemDefinition'
}, {
ifCondition: 'BlockString',
expect: 'TypeSystemDefinition'
}]
},
OperationDefinition: {
peek: [{
ifCondition: {
token: '{'
},
expect: 'SelectionSet'
}, {
ifCondition: {
token: 'Name',
oneOf: ['query', 'mutation', 'subscription']
},
expect: ['OperationType', {
token: 'Name',
optional: true,
tokenName: 'OperationName',
definitionName: true
}, {
ofType: 'VariableDefinitions',
optional: true
}, {
ofType: 'Directives',
optional: true
}, 'SelectionSet']
}]
},
OperationType: {
ofType: 'OperationTypeName'
},
OperationTypeName: {
token: 'Name',
oneOf: ['query', 'mutation', 'subscription'],
definitionName: true
},
SelectionSet: [{
token: '{'
}, {
listOfType: 'Selection'
}, {
token: '}'
}],
Selection: {
peek: [{
ifCondition: {
token: '...'
},
expect: 'Fragment'
}, {
ifCondition: {
token: 'Name'
},
expect: 'Field'
}]
},
Field: [{
ofType: 'Alias',
optional: true,
eatNextOnFail: true,
definitionName: true
}, {
token: 'Name',
tokenName: 'FieldName',
definitionName: true
}, {
ofType: 'Arguments',
optional: true
}, {
ofType: 'Directives',
optional: true
}, {
ofType: 'SelectionSet',
optional: true
}],
Arguments: [{
token: '('
}, {
listOfType: 'Argument'
}, {
token: ')'
}],
Argument: [{
token: 'Name',
tokenName: 'ArgumentName',
definitionName: true
}, {
token: ':'
}, 'Value'],
Alias: [{
token: 'Name',
tokenName: 'AliasName',
definitionName: true
}, {
token: ':'
}],
Fragment: [{
token: '...'
}, {
peek: [{
ifCondition: 'FragmentName',
expect: 'FragmentSpread'
}, {
ifCondition: {
token: 'Name',
ofValue: 'on'
},
expect: 'InlineFragment'
}, {
ifCondition: {
token: '@'
},
expect: 'InlineFragment'
}, {
ifCondition: {
token: '{'
},
expect: 'InlineFragment'
}]
}],
FragmentSpread: ['FragmentName', {
ofType: 'Directives',
optional: true
}],
FragmentDefinition: [{
token: 'Name',
ofValue: 'fragment',
tokenName: 'FragmentDefinitionKeyword'
}, 'FragmentName', 'TypeCondition', {
ofType: 'Directives',
optional: true
}, 'SelectionSet'],
FragmentName: {
token: 'Name',
butNot: {
token: 'Name',
ofValue: 'on'
},
definitionName: true
},
TypeCondition: [{
token: 'Name',
ofValue: 'on',
tokenName: 'OnKeyword'
}, 'TypeName'],
InlineFragment: [{
ofType: 'TypeCondition',
optional: true
}, {
ofType: 'Directives',
optional: true
}, 'SelectionSet'],
Value: {
peek: [{
ifCondition: {
token: '$'
},
expect: 'Variable'
}, {
ifCondition: 'IntValue',
expect: {
ofType: 'IntValue',
tokenName: 'NumberValue'
}
}, {
ifCondition: 'FloatValue',
expect: {
ofType: 'FloatValue',
tokenName: 'NumberValue'
}
}, {
ifCondition: 'BooleanValue',
expect: {
ofType: 'BooleanValue',
tokenName: 'BooleanValue'
}
}, {
ifCondition: 'EnumValue',
expect: {
ofType: 'EnumValue',
tokenName: 'EnumValue'
}
}, {
ifCondition: 'String',
expect: {
ofType: 'String',
tokenName: 'StringValue'
}
}, {
ifCondition: 'BlockString',
expect: {
ofType: 'BlockString',
tokenName: 'StringValue'
}
}, {
ifCondition: 'NullValue',
expect: {
ofType: 'NullValue',
tokenName: 'NullValue'
}
}, {
ifCondition: {
token: '['
},
expect: 'ListValue'
}, {
ifCondition: {
token: '{'
},
expect: 'ObjectValue'
}]
},
ConstValue: {
peek: [{
ifCondition: 'IntValue',
expect: {
ofType: 'IntValue'
}
}, {
ifCondition: 'FloatValue',
expect: {
ofType: 'FloatValue'
}
}, {
ifCondition: 'BooleanValue',
expect: 'BooleanValue'
}, {
ifCondition: 'EnumValue',
expect: 'EnumValue'
}, {
ifCondition: 'String',
expect: {
ofType: 'String',
tokenName: 'StringValue'
}
}, {
ifCondition: 'BlockString',
expect: {
token: 'BlockString',
tokenName: 'StringValue'
}
}, {
ifCondition: 'NullValue',
expect: 'NullValue'
}, {
ifCondition: {
token: '['
},
expect: 'ConstListValue'
}, {
ifCondition: {
token: '{'
},
expect: 'ObjectValue'
}]
},
IntValue: {
token: 'Int'
},
FloatValue: {
token: 'Float'
},
StringValue: {
peek: [{
ifCondition: {
token: 'String'
},
expect: {
token: 'String',
tokenName: 'StringValue'
}
}, {
ifCondition: {
token: 'BlockString'
},
expect: {
token: 'BlockString',
tokenName: 'StringValue'
}
}]
},
BooleanValue: {
token: 'Name',
oneOf: ['true', 'false'],
tokenName: 'BooleanValue'
},
NullValue: {
token: 'Name',
ofValue: 'null',
tokenName: 'NullValue'
},
EnumValue: {
token: 'Name',
butNot: {
token: 'Name',
oneOf: ['null', 'true', 'false']
},
tokenName: 'EnumValue'
},
ListValue: [{
token: '['
}, {
listOfType: 'Value',
optional: true
}, {
token: ']'
}],
ConstListValue: [{
token: '['
}, {
listOfType: 'ConstValue',
optional: true
}, {
token: ']'
}],
ObjectValue: [{
token: '{'
}, {
listOfType: 'ObjectField',
optional: true
}, {
token: '}'
}],
ObjectField: [{
token: 'Name',
tokenName: 'ObjectFieldName'
}, {
token: ':'
}, {
ofType: 'ConstValue'
}],
Variable: [{
token: '$',
tokenName: 'VariableName'
}, {
token: 'Name',
tokenName: 'VariableName'
}],
VariableDefinitions: [{
token: '('
}, {
listOfType: 'VariableDefinition'
}, {
token: ')'
}],
VariableDefinition: ['Variable', {
token: ':'
}, 'Type', {
ofType: 'DefaultValue',
optional: true
}],
DefaultValue: [{
token: '='
}, 'ConstValue'],
TypeName: {
token: 'Name',
tokenName: 'TypeName',
typeName: true
},
Type: {
peek: [{
ifCondition: {
token: 'Name'
},
expect: ['TypeName', {
token: '!',
optional: true
}]
}, {
ifCondition: {
token: '['
},
expect: 'ListType'
}]
},
ListType: [{
token: '['
}, {
listOfType: 'Type'
}, {
token: ']'
}, {
token: '!',
optional: true
}],
Directives: {
listOfType: 'Directive'
},
Directive: [{
token: '@',
tokenName: 'DirectiveName'
}, {
token: 'Name',
tokenName: 'DirectiveName'
}, {
ofType: 'Arguments',
optional: true
}],
TypeSystemDefinition: [{
ofType: 'Description',
optional: true
}, {
peek: [{
ifCondition: {
target: 'Name',
ofValue: 'schema'
},
expect: 'SchemaDefinition'
}, {
ifCondition: {
target: 'Name',
ofValue: 'scalar'
},
expect: 'ScalarTypeDefinition'
}, {
ifCondition: {
target: 'Name',
ofValue: 'type'
},
expect: 'ObjectTypeDefinition'
}, {
ifCondition: {
target: 'Name',
ofValue: 'interface'
},
expect: 'InterfaceTypeDefinition'
}, {
ifCondition: {
target: 'Name',
ofValue: 'union'
},
expect: 'UnionTypeDefinition'
}, {
ifCondition: {
target: 'Name',
ofValue: 'enum'
},
expect: 'EnumTypeDefinition'
}, {
ifCondition: {
target: 'Name',
ofValue: 'input'
},
expect: 'InputObjectTypeDefinition'
}, {
ifCondition: {
target: 'Name',
ofValue: 'directive'
},
expect: 'DirectiveDefinition'
}]
}],
TypeSystemExtension: {
peek: [{
ifCondition: {
target: 'Name',
ofValue: 'schema'
},
expect: 'SchemaExtension'
}, {
ifCondition: {
target: 'Name',
ofValue: 'scalar'
},
expect: 'ScalarTypeExtension'
}, {
ifCondition: {
target: 'Name',
ofValue: 'type'
},
expect: 'ObjectTypeExtension'
}, {
ifCondition: {
target: 'Name',
ofValue: 'interface'
},
expect: 'InterfaceTypeExtension'
}, {
ifCondition: {
target: 'Name',
ofValue: 'union'
},
expect: 'UnionTypeExtension'
}, {
ifCondition: {
target: 'Name',
ofValue: 'enum'
},
expect: 'EnumTypeExtension'
}, {
ifCondition: {
target: 'Name',
ofValue: 'input'
},
expect: 'InputObjectTypeExtension'
}]
},
SchemaDefinition: [{
token: 'Name',
ofValue: 'schema',
tokenName: 'SchemaDefinitionKeyword'
}, {
ofType: 'Directives',
optional: true
}, {
token: '{'
}, {
listOfType: 'RootOperationTypeDefinition'
}, {
token: '}'
}],
RootOperationTypeDefinition: ['OperationType', {
token: ':'
}, {
token: 'Name',
tokenName: 'OperationTypeDefinitionName'
}],
SchemaExtension: [{
token: 'Name',
ofValue: 'extend'
}, {
token: 'Name',
ofValue: 'schema'
}, 'Name', {
peek: [{
ifCondition: {
token: '@'
},
expect: ['Directives', {
ofType: [{
token: '{'
}, {
listOfType: 'RootOperationTypeDefinition'
}, {
token: '}'
}],
optional: true
}]
}, {
ifCondition: {
token: '{'
},
expect: [{
token: '{'
}, {
listOfType: 'RootOperationTypeDefinition'
}, {
token: '}'
}]
}]
}],
Description: 'StringValue',
ScalarTypeDefinition: [{
ofType: 'Description',
optional: true
}, {
token: 'Name',
ofValue: 'scalar',
tokenName: 'ScalarDefinitionKeyword'
}, 'TypeName', {
ofType: 'Directives',
optional: true
}],
ScalarTypeExtension: [{
token: 'Name',
ofValue: 'extend',
tokenName: 'ExtendDefinitionKeyword'
}, {
token: 'Name',
ofValue: 'scalar',
tokenName: 'ScalarDefinitionKeyword'
}, 'TypeName', 'Directives'],
ObjectTypeDefinition: [{
ofType: 'Description',
optional: true
}, {
token: 'Name',
ofValue: 'type',
tokenName: 'TypeDefinitionKeyword'
}, 'TypeName', {
ofType: 'ImplementsInterfaces',
optional: true
}, {
ofType: 'Directives',
optional: true
}, {
ofType: 'FieldsDefinition',
optional: true
}],
ImplementsInterfaces: [{
token: 'Name',
ofValue: 'implements',
tokenName: 'ImplementsKeyword'
}, {
token: '&',
optional: true
}, 'TypeName', {
listOfType: 'ImplementsAdditionalInterfaceName',
optional: true
}],
ImplementsAdditionalInterfaceName: [{
token: '&'
}, 'TypeName'],
FieldsDefinition: [{
token: '{'
}, {
listOfType: 'FieldDefinition'
}, {
token: '}'
}],
FieldDefinition: [{
ofType: 'Description',
optional: true
}, {
token: 'Name',
tokenName: 'AliasName',
definitionName: true
}, {
ofType: 'ArgumentsDefinition',
optional: true
}, {
token: ':'
}, 'Type', {
ofType: 'Directives',
optional: true
}],
ArgumentsDefinition: [{
token: '('
}, {
listOfType: 'InputValueDefinition'
}, {
token: ')'
}],
InputValueDefinition: [{
ofType: 'Description',
optional: true
}, {
token: 'Name',
tokenName: 'ArgumentName'
}, {
token: ':'
}, 'Type', {
ofType: 'DefaultValue',
optional: true
}, {
ofType: 'Directives',
optional: true
}],
ObjectTypeExtension: [{
token: 'Name',
ofValue: 'extend',
tokenName: 'ExtendDefinitionKeyword'
}, {
token: 'Name',
ofValue: 'type',
tokenName: 'TypeDefinitionKeyword'
}, 'TypeName', {
peek: [{
ifCondition: {
token: 'Name',
ofValue: 'interface'
},
expect: ['ImplementsInterfaces', {
peek: [{
ifCondition: {
token: '@'
},
expect: ['Directives', {
ofType: 'FieldsDefinition',
optional: true
}]
}, {
ifCondition: {
token: '{'
},
expect: 'FieldsDefinition'
}],
optional: true
}]
}, {
ifCondition: {
token: '@'
},
expect: ['Directives', {
ofType: 'FieldsDefinition',
optional: true
}]
}, {
ifCondition: {
token: '{'
},
expect: 'FieldsDefinition'
}]
}],
InterfaceTypeDefinition: [{
ofType: 'Description',
optional: true
}, {
token: 'Name',
ofValue: 'interface',
tokenName: 'InterfaceDefinitionKeyword'
}, 'TypeName', {
ofType: 'Directives',
optional: true
}, {
ofType: 'FieldsDefinition',
optional: true
}],
InterfaceTypeExtension: [{
token: 'Name',
ofValue: 'extend',
tokenName: 'ExtendDefinitionKeyword'
}, {
token: 'Name',
ofValue: 'interface',
tokenName: 'InterfaceDefinitionKeyword'
}, 'TypeName', {
peek: [{
ifCondition: {
token: '@'
},
expect: ['Directives', {
ofType: 'FieldsDefinition',
optional: true
}]
}, {
ifCondition: {
token: '{'
},
expect: 'FieldsDefinition'
}]
}],
UnionTypeDefinition: [{
ofType: 'Description',
optional: true
}, {
token: 'Name',
ofValue: 'union',
tokenName: 'UnionDefinitionKeyword'
}, 'TypeName', {
ofType: 'Directives',
optional: true
}, {
ofType: 'UnionMemberTypes',
optional: true
}],
UnionMemberTypes: [{
token: '='
}, {
token: '|',
optional: true
}, 'Name', {
listOfType: 'UnionMemberAdditionalTypeName',
optional: true
}],
UnionMemberAdditionalTypeName: [{
token: '|'
}, 'TypeName'],
UnionTypeExtension: [{
token: 'Name',
ofValue: 'extend',
tokenName: 'ExtendDefinitionKeyword'
}, {
token: 'Name',
ofValue: 'union',
tokenName: 'UnionDefinitionKeyword'
}, 'TypeName', {
peek: [{
ifCondition: {
token: '@'
},
expect: ['Directives', {
ofType: 'UnionMemberTypes',
optional: true
}]
}, {
ifCondition: {
token: '='
},
expect: 'UnionMemberTypes'
}]
}],
EnumTypeDefinition: [{
ofType: 'Description',
optional: true
}, {
token: 'Name',
ofValue: 'enum',
tokenName: 'EnumDefinitionKeyword'
}, 'TypeName', {
ofType: 'Directives',
optional: true
}, {
ofType: 'EnumValuesDefinition',
optional: true
}],
EnumValuesDefinition: [{
token: '{'
}, {
listOfType: 'EnumValueDefinition'
}, {
token: '}'
}],
EnumValueDefinition: [{
ofType: 'Description',
optional: true
}, 'EnumValue', {
ofType: 'Directives',
optional: true
}],
EnumTypeExtension: [{
token: 'Name',
ofValue: 'extend',
tokenName: 'ExtendDefinitionKeyword'
}, {
token: 'Name',
ofValue: 'enum',
tokenName: 'EnumDefinitionKeyword'
}, 'TypeName', {
peek: [{
ifCondition: {
token: '@'
},
expect: ['Directives', {
ofType: 'EnumValuesDefinition',
optional: true
}]
}, {
ifCondition: {
token: '{'
},
expect: 'EnumValuesDefinition'
}]
}],
InputObjectTypeDefinition: [{
ofType: 'Description',
optional: true
}, {
token: 'Name',
ofValue: 'input',
tokenName: 'InputDefinitionKeyword'
}, 'TypeName', {
ofType: 'Directives',
optional: true
}, {
ofType: 'InputFieldsDefinition',
optional: true
}],
InputFieldsDefinition: [{
token: '{'
}, {
listOfType: 'InputValueDefinition'
}, {
token: '}'
}],
InputObjectTypeExtension: [{
token: 'Name',
ofValue: 'extend',
tokenName: 'ExtendDefinitionKeyword'
}, {
token: 'Name',
ofValue: 'input',
tokenName: 'InputDefinitionKeyword'
}, 'TypeName', {
peek: [{
ifCondition: {
token: '@'
},
expect: ['Directives', {
ofType: 'InputFieldsDefinition',
optional: true
}]
}, {
ifCondition: {
token: '{'
},
expect: 'InputFieldsDefinition'
}]
}],
DirectiveDefinition: [{
ofType: 'Description',
optional: true
}, {
token: 'Name',
ofValue: 'directive',
tokenName: 'DirectiveDefinitionKeyword'
}, {
token: '@',
tokenName: 'DirectiveName'
}, {
token: 'Name',
tokenName: 'DirectiveName'
}, {
ofType: 'ArgumentsDefinition',
optional: true
}, {
token: 'Name',
ofValue: 'on',
tokenName: 'OnKeyword'
}, 'DirectiveLocations'],
DirectiveLocations: [{
token: '|',
optional: true
}, 'DirectiveLocation', {
listOfType: 'DirectiveLocationAdditionalName',
optional: true
}],
DirectiveLocationAdditionalName: [{
token: '|'
}, 'DirectiveLocation'],
DirectiveLocation: {
peek: [{
ifCondition: 'ExecutableDirectiveLocation',
expect: 'ExecutableDirectiveLocation'
}, {
ifCondition: 'TypeSystemDirectiveLocation',
expect: 'TypeSystemDirectiveLocation'
}]
},
ExecutableDirectiveLocation: {
token: 'Name',
oneOf: ['QUERY', 'MUTATION', 'SUBSCRIPTION', 'FIELD', 'FRAGMENT_DEFINITION', 'FRAGMENT_SPREAD', 'INLINE_FRAGMENT'],
tokenName: 'EnumValue'
},
TypeSystemDirectiveLocation: {
token: 'Name',
oneOf: ['SCHEMA', 'SCALAR', 'OBJECT', 'FIELD_DEFINITION', 'ARGUMENT_DEFINITION', 'INTERFACE', 'UNION', 'ENUM', 'ENUM_VALUE', 'INPUT_OBJECT', 'INPUT_FIELD_DEFINITION'],
tokenName: 'EnumValue'
} // FIXME: enforce proper typing
};
export default grammar;
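
Most branching rules in this grammar are peek constraints: the parser inspects the next token and expands the branch whose ifCondition matches. The following is a simplified, hand-rolled sketch of that resolution (a hypothetical resolvePeek helper; it ignores butNot and handles only token-based conditions, and it assumes the ESM build from this commit).

import grammar from './grammar.mjs';

function resolvePeek(peekRule, token) {
  for (const { ifCondition, expect } of peekRule.peek) {
    // String conditions ('String', 'BlockString', ...) are indirections into the grammar itself.
    const cond = typeof ifCondition === 'string' ? grammar[ifCondition] : ifCondition;
    const valueOk =
      (cond.ofValue === undefined || cond.ofValue === token.value) &&
      (cond.oneOf === undefined || cond.oneOf.includes(token.value));
    if (token.kind === cond.token && valueOk) {
      return expect; // the rule (or constraint list) the parser will expand next
    }
  }
  return null;
}

console.log(resolvePeek(grammar.Definition, { kind: 'Name', value: 'fragment' })); // 'FragmentDefinition'
console.log(resolvePeek(grammar.Definition, { kind: '{' }));                       // 'OperationDefinition'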

View File

@@ -0,0 +1,6 @@
export {
OnlineParser,
RuleKind,
TokenKind,
OnlineParserState,
} from './onlineParser';

View File

@@ -0,0 +1,31 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "OnlineParser", {
enumerable: true,
get: function get() {
return _onlineParser.OnlineParser;
}
});
Object.defineProperty(exports, "RuleKind", {
enumerable: true,
get: function get() {
return _onlineParser.RuleKind;
}
});
Object.defineProperty(exports, "TokenKind", {
enumerable: true,
get: function get() {
return _onlineParser.TokenKind;
}
});
Object.defineProperty(exports, "OnlineParserState", {
enumerable: true,
get: function get() {
return _onlineParser.OnlineParserState;
}
});
var _onlineParser = require("./onlineParser.js");

View File

@@ -0,0 +1,7 @@
// @flow strict
export {
OnlineParser,
RuleKind,
TokenKind,
OnlineParserState,
} from './onlineParser';

View File

@@ -0,0 +1 @@
export { OnlineParser, RuleKind, TokenKind, OnlineParserState } from "./onlineParser.mjs";

View File

@@ -0,0 +1,125 @@
import { Lexer } from '../lexer';
import {
GraphQLGrammarTokenConstraint,
GraphQLGrammarOfTypeConstraint,
GraphQLGrammarListOfTypeConstraint,
GraphQLGrammarPeekConstraint,
GraphQLGrammarConstraintsSet,
} from './grammar';
interface BaseOnlineParserRule {
kind: string;
name?: string;
depth: number;
step: number;
expanded: boolean;
state: string;
optional?: boolean;
eatNextOnFail?: boolean;
}
interface TokenOnlineParserRule
extends BaseOnlineParserRule,
GraphQLGrammarTokenConstraint {}
interface OfTypeOnlineParserRule
extends BaseOnlineParserRule,
GraphQLGrammarOfTypeConstraint {}
interface ListOfTypeOnlineParserRule
extends BaseOnlineParserRule,
GraphQLGrammarListOfTypeConstraint {}
interface PeekOnlineParserRule
extends BaseOnlineParserRule,
GraphQLGrammarPeekConstraint {
index: number;
matched: boolean;
}
interface ConstraintsSetOnlineParserRule extends BaseOnlineParserRule {
constraintsSet: boolean;
constraints: GraphQLGrammarConstraintsSet;
}
type OnlineParserRule =
| TokenOnlineParserRule
| OfTypeOnlineParserRule
| ListOfTypeOnlineParserRule
| PeekOnlineParserRule
| ConstraintsSetOnlineParserRule;
export interface OnlineParserState {
rules: Array<OnlineParserRule>;
kind: () => string;
step: () => number;
levels: Array<number>;
indentLevel: number | undefined;
name: string | null;
type: string | null;
}
interface Token {
kind: string;
value?: string;
tokenName?: string | undefined;
ruleName?: string | undefined;
}
type OnlineParserConfig = {
tabSize: number;
};
type OnlineParserConfigOption = {
tabSize?: number;
};
export class OnlineParser {
state: OnlineParserState;
_lexer: Lexer;
_config: OnlineParserConfig;
constructor(
source: string,
state?: OnlineParserState,
config?: OnlineParserConfigOption,
);
static startState(): OnlineParserState;
static copyState(state: OnlineParserState): OnlineParserState;
sol(): boolean;
parseToken(): Token;
indentation(): number;
private readonly _parseTokenConstraint;
private readonly _parseListOfTypeConstraint;
private readonly _parseOfTypeConstraint;
private readonly _parsePeekConstraint;
private readonly _parseConstraintsSetRule;
private readonly _matchToken;
private readonly _butNot;
private readonly _transformLexerToken;
private readonly _getNextRule;
private readonly _popMatchedRule;
private readonly _rollbackRule;
private readonly _pushRule;
private readonly _getRuleKind;
private readonly _advanceToken;
private readonly _lookAhead;
}
export const TokenKind: {
NAME: string;
INT: string;
FLOAT: string;
STRING: string;
BLOCK_STRING: string;
COMMENT: string;
PUNCTUATION: string;
EOF: string;
INVALID: string;
};
export const RuleKind: {
TOKEN_CONSTRAINT: string;
OF_TYPE_CONSTRAINT: string;
LIST_OF_TYPE_CONSTRAINT: string;
PEEK_CONSTRAINT: string;
CONSTRAINTS_SET: string;
CONSTRAINTS_SET_ROOT: string;
RULE_NAME: string;
INVALID: string;
};
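
The declarations above cover the public surface: construct an OnlineParser over a source string and call parseToken() until it reports EOF. A usage sketch, assuming the CommonJS build from this commit required as './index.js':

const { OnlineParser, TokenKind } = require('./index.js');

const parser = new OnlineParser('query Hero { hero { name } }');
const out = [];
let token = parser.parseToken();
while (token.kind !== TokenKind.EOF && token.kind !== TokenKind.INVALID) {
  // Each token carries kind/value plus tokenName/ruleName hints that an editor
  // mode can map onto highlighting classes.
  out.push(`${token.kind}(${token.value})`);
  token = parser.parseToken();
}
console.log(out.join(' '));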

View File

@@ -0,0 +1,604 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.OnlineParser = exports.RuleKind = exports.TokenKind = void 0;
var _lexer = require("../lexer.js");
var _source = require("../source.js");
var _grammar = _interopRequireDefault(require("./grammar.js"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
var TokenKind = {
NAME: 'Name',
INT: 'Int',
FLOAT: 'Float',
STRING: 'String',
BLOCK_STRING: 'BlockString',
COMMENT: 'Comment',
PUNCTUATION: 'Punctuation',
EOF: '<EOF>',
INVALID: 'Invalid'
};
exports.TokenKind = TokenKind;
var RuleKind = {
TOKEN_CONSTRAINT: 'TokenConstraint',
OF_TYPE_CONSTRAINT: 'OfTypeConstraint',
LIST_OF_TYPE_CONSTRAINT: 'ListOfTypeConstraint',
PEEK_CONSTRAINT: 'PeekConstraint',
CONSTRAINTS_SET: 'ConstraintsSet',
CONSTRAINTS_SET_ROOT: 'ConstraintsSetRoot',
RULE_NAME: 'RuleName',
INVALID: 'Invalid'
};
exports.RuleKind = RuleKind;
var OnlineParser = /*#__PURE__*/function () {
function OnlineParser(source, state, config) {
var _config$tabSize;
this.state = state || OnlineParser.startState();
this._config = {
tabSize: (_config$tabSize = config === null || config === void 0 ? void 0 : config.tabSize) !== null && _config$tabSize !== void 0 ? _config$tabSize : 2
};
this._lexer = new _lexer.Lexer(new _source.Source(source));
}
OnlineParser.startState = function startState() {
return {
rules: [// $FlowFixMe[cannot-spread-interface]
_objectSpread(_objectSpread({
name: 'Document',
state: 'Document',
kind: 'ListOfTypeConstraint'
}, _grammar.default.Document), {}, {
expanded: false,
depth: 1,
step: 1
})],
name: null,
type: null,
levels: [],
indentLevel: 0,
kind: function kind() {
var _this$rules;
return ((_this$rules = this.rules[this.rules.length - 1]) === null || _this$rules === void 0 ? void 0 : _this$rules.state) || '';
},
step: function step() {
var _this$rules2;
return ((_this$rules2 = this.rules[this.rules.length - 1]) === null || _this$rules2 === void 0 ? void 0 : _this$rules2.step) || 0;
}
};
};
OnlineParser.copyState = function copyState(state) {
return {
name: state.name,
type: state.type,
rules: JSON.parse(JSON.stringify(state.rules)),
levels: [].concat(state.levels),
indentLevel: state.indentLevel,
kind: function kind() {
var _this$rules3;
return ((_this$rules3 = this.rules[this.rules.length - 1]) === null || _this$rules3 === void 0 ? void 0 : _this$rules3.state) || '';
},
step: function step() {
var _this$rules4;
return ((_this$rules4 = this.rules[this.rules.length - 1]) === null || _this$rules4 === void 0 ? void 0 : _this$rules4.step) || 0;
}
};
};
var _proto = OnlineParser.prototype;
_proto.sol = function sol() {
return this._lexer.source.locationOffset.line === 1 && this._lexer.source.locationOffset.column === 1;
};
_proto.parseToken = function parseToken() {
var rule = this._getNextRule();
if (this.sol()) {
this.state.indentLevel = Math.floor(this.indentation() / this._config.tabSize);
}
if (!rule) {
return {
kind: TokenKind.INVALID,
value: ''
};
}
var token;
if (this._lookAhead().kind === '<EOF>') {
return {
kind: TokenKind.EOF,
value: '',
ruleName: rule.name
};
}
switch (rule.kind) {
case RuleKind.TOKEN_CONSTRAINT:
token = this._parseTokenConstraint(rule);
break;
case RuleKind.LIST_OF_TYPE_CONSTRAINT:
token = this._parseListOfTypeConstraint(rule);
break;
case RuleKind.OF_TYPE_CONSTRAINT:
token = this._parseOfTypeConstraint(rule);
break;
case RuleKind.PEEK_CONSTRAINT:
token = this._parsePeekConstraint(rule);
break;
case RuleKind.CONSTRAINTS_SET_ROOT:
token = this._parseConstraintsSetRule(rule);
break;
default:
return {
kind: TokenKind.INVALID,
value: '',
ruleName: rule.name
};
}
if (token && token.kind === TokenKind.INVALID) {
if (rule.optional === true) {
this.state.rules.pop();
} else {
this._rollbackRule();
}
return this.parseToken() || token;
}
return token;
};
_proto.indentation = function indentation() {
var match = this._lexer.source.body.match(/\s*/);
var indent = 0;
if (match && match.length === 0) {
var whiteSpaces = match[0];
var pos = 0;
while (whiteSpaces.length > pos) {
if (whiteSpaces.charCodeAt(pos) === 9) {
indent += 2;
} else {
indent++;
}
pos++;
}
}
return indent;
};
_proto._parseTokenConstraint = function _parseTokenConstraint(rule) {
rule.expanded = true;
var token = this._lookAhead();
if (!this._matchToken(token, rule)) {
return {
kind: TokenKind.INVALID,
value: '',
tokenName: rule.tokenName,
ruleName: rule.name
};
}
this._advanceToken();
var parserToken = this._transformLexerToken(token, rule);
this._popMatchedRule(parserToken);
return parserToken;
};
_proto._parseListOfTypeConstraint = function _parseListOfTypeConstraint(rule) {
this._pushRule(_grammar.default[rule.listOfType], rule.depth + 1, rule.listOfType, 1, rule.state);
rule.expanded = true;
var token = this.parseToken();
return token;
};
_proto._parseOfTypeConstraint = function _parseOfTypeConstraint(rule) {
if (rule.expanded) {
this._popMatchedRule();
return this.parseToken();
}
this._pushRule(rule.ofType, rule.depth + 1, rule.tokenName, 1, rule.state);
rule.expanded = true;
var token = this.parseToken();
return token;
};
_proto._parsePeekConstraint = function _parsePeekConstraint(rule) {
if (rule.expanded) {
this._popMatchedRule();
return this.parseToken();
}
while (!rule.matched && rule.index < rule.peek.length - 1) {
rule.index++;
var constraint = rule.peek[rule.index];
var ifCondition = constraint.ifCondition;
if (typeof ifCondition === 'string') {
ifCondition = _grammar.default[ifCondition];
}
var token = this._lookAhead();
if (ifCondition && this._matchToken(token, ifCondition)) {
rule.matched = true;
rule.expanded = true;
this._pushRule(constraint.expect, rule.depth + 1, '', 1, rule.state);
token = this.parseToken();
return token;
}
}
return {
kind: TokenKind.INVALID,
value: '',
ruleName: rule.name
};
};
_proto._parseConstraintsSetRule = function _parseConstraintsSetRule(rule) {
if (rule.expanded) {
this._popMatchedRule();
return this.parseToken();
}
for (var index = rule.constraints.length - 1; index >= 0; index--) {
this._pushRule(rule.constraints[index], rule.depth + 1, '', index, rule.state);
}
rule.expanded = true;
return this.parseToken();
};
_proto._matchToken = function _matchToken(token, rule) {
if (typeof token.value === 'string') {
if (typeof rule.ofValue === 'string' && token.value !== rule.ofValue || Array.isArray(rule.oneOf) && !rule.oneOf.includes(token.value) || typeof rule.ofValue !== 'string' && !Array.isArray(rule.oneOf) && token.kind !== rule.token) {
return false;
}
return this._butNot(token, rule);
}
if (token.kind !== rule.token) {
return false;
}
return this._butNot(token, rule);
};
_proto._butNot = function _butNot(token, rule) {
var _this = this;
if (rule.butNot) {
if (Array.isArray(rule.butNot)) {
if (rule.butNot.reduce(function (matched, constraint) {
return matched || _this._matchToken(token, constraint);
}, false)) {
return false;
}
return true;
}
return !this._matchToken(token, rule.butNot);
}
return true;
};
_proto._transformLexerToken = function _transformLexerToken(lexerToken, rule) {
var token;
var ruleName = rule.name || '';
var tokenName = rule.tokenName || '';
if (lexerToken.kind === '<EOF>' || lexerToken.value !== undefined) {
token = {
kind: lexerToken.kind,
value: lexerToken.value || '',
tokenName: tokenName,
ruleName: ruleName
};
if (token.kind === TokenKind.STRING) {
token.value = "\"".concat(token.value, "\"");
} else if (token.kind === TokenKind.BLOCK_STRING) {
token.value = "\"\"\"".concat(token.value, "\"\"\"");
}
} else {
token = {
kind: TokenKind.PUNCTUATION,
value: lexerToken.kind,
tokenName: tokenName,
ruleName: ruleName
};
if (/^[{([]/.test(token.value)) {
if (this.state.indentLevel !== undefined) {
this.state.levels = this.state.levels.concat(this.state.indentLevel + 1);
}
} else if (/^[})\]]/.test(token.value)) {
this.state.levels.pop();
}
}
return token;
};
_proto._getNextRule = function _getNextRule() {
return this.state.rules[this.state.rules.length - 1] || null;
};
_proto._popMatchedRule = function _popMatchedRule(token) {
var rule = this.state.rules.pop();
if (!rule) {
return;
}
if (token && rule.kind === RuleKind.TOKEN_CONSTRAINT) {
var constraint = rule;
if (typeof constraint.definitionName === 'string') {
this.state.name = token.value || null;
} else if (typeof constraint.typeName === 'string') {
this.state.type = token.value || null;
}
}
var nextRule = this._getNextRule();
if (!nextRule) {
return;
}
if (nextRule.depth === rule.depth - 1 && nextRule.expanded && nextRule.kind === RuleKind.CONSTRAINTS_SET_ROOT) {
this.state.rules.pop();
}
if (nextRule.depth === rule.depth - 1 && nextRule.expanded && nextRule.kind === RuleKind.LIST_OF_TYPE_CONSTRAINT) {
nextRule.expanded = false;
nextRule.optional = true;
}
};
_proto._rollbackRule = function _rollbackRule() {
var _this2 = this;
if (!this.state.rules.length) {
return;
}
var popRule = function popRule() {
var lastPoppedRule = _this2.state.rules.pop();
if (lastPoppedRule.eatNextOnFail === true) {
_this2.state.rules.pop();
}
};
var poppedRule = this.state.rules.pop();
if (!poppedRule) {
return;
}
var popped = 0;
var nextRule = this._getNextRule();
while (nextRule && (poppedRule.kind !== RuleKind.LIST_OF_TYPE_CONSTRAINT || nextRule.expanded) && nextRule.depth > poppedRule.depth - 1) {
this.state.rules.pop();
popped++;
nextRule = this._getNextRule();
}
if (nextRule && nextRule.expanded) {
if (nextRule.optional === true) {
popRule();
} else {
if (nextRule.kind === RuleKind.LIST_OF_TYPE_CONSTRAINT && popped === 1) {
this.state.rules.pop();
return;
}
this._rollbackRule();
}
}
};
_proto._pushRule = function _pushRule(baseRule, depth, name, step, state) {
var _this$_getNextRule, _this$_getNextRule2, _this$_getNextRule3, _this$_getNextRule4, _this$_getNextRule5, _this$_getNextRule6, _this$_getNextRule7, _this$_getNextRule8, _this$_getNextRule9, _this$_getNextRule10;
this.state.name = null;
this.state.type = null;
var rule = baseRule;
switch (this._getRuleKind(rule)) {
case RuleKind.RULE_NAME:
rule = rule;
this._pushRule(_grammar.default[rule], depth, (typeof name === 'string' ? name : undefined) || rule, step, state);
break;
case RuleKind.CONSTRAINTS_SET:
rule = rule;
this.state.rules.push({
name: name || '',
depth: depth,
expanded: false,
constraints: rule,
constraintsSet: true,
kind: RuleKind.CONSTRAINTS_SET_ROOT,
state: (typeof name === 'string' ? name : undefined) || (typeof state === 'string' ? state : undefined) || ((_this$_getNextRule = this._getNextRule()) === null || _this$_getNextRule === void 0 ? void 0 : _this$_getNextRule.state) || '',
step: typeof step === 'number' ? step : (((_this$_getNextRule2 = this._getNextRule()) === null || _this$_getNextRule2 === void 0 ? void 0 : _this$_getNextRule2.step) || 0) + 1
});
break;
case RuleKind.OF_TYPE_CONSTRAINT:
rule = rule;
this.state.rules.push({
name: name || '',
ofType: rule.ofType,
optional: Boolean(rule.optional),
butNot: rule.butNot,
eatNextOnFail: Boolean(rule.eatNextOnFail),
depth: depth,
expanded: false,
kind: RuleKind.OF_TYPE_CONSTRAINT,
state: (typeof rule.tokenName === 'string' ? rule.tokenName : undefined) || (typeof name === 'string' ? name : undefined) || (typeof state === 'string' ? state : undefined) || ((_this$_getNextRule3 = this._getNextRule()) === null || _this$_getNextRule3 === void 0 ? void 0 : _this$_getNextRule3.state) || '',
step: typeof step === 'number' ? step : (((_this$_getNextRule4 = this._getNextRule()) === null || _this$_getNextRule4 === void 0 ? void 0 : _this$_getNextRule4.step) || 0) + 1
});
break;
case RuleKind.LIST_OF_TYPE_CONSTRAINT:
rule = rule;
this.state.rules.push({
listOfType: rule.listOfType,
optional: Boolean(rule.optional),
butNot: rule.butNot,
eatNextOnFail: Boolean(rule.eatNextOnFail),
name: name || '',
depth: depth,
expanded: false,
kind: RuleKind.LIST_OF_TYPE_CONSTRAINT,
state: (typeof name === 'string' ? name : undefined) || (typeof state === 'string' ? state : undefined) || ((_this$_getNextRule5 = this._getNextRule()) === null || _this$_getNextRule5 === void 0 ? void 0 : _this$_getNextRule5.state) || '',
step: typeof step === 'number' ? step : (((_this$_getNextRule6 = this._getNextRule()) === null || _this$_getNextRule6 === void 0 ? void 0 : _this$_getNextRule6.step) || 0) + 1
});
break;
case RuleKind.TOKEN_CONSTRAINT:
rule = rule;
this.state.rules.push({
token: rule.token,
ofValue: rule.ofValue,
oneOf: rule.oneOf,
definitionName: Boolean(rule.definitionName),
typeName: Boolean(rule.typeName),
optional: Boolean(rule.optional),
butNot: rule.butNot,
eatNextOnFail: Boolean(rule.eatNextOnFail),
name: name || '',
depth: depth,
expanded: false,
kind: RuleKind.TOKEN_CONSTRAINT,
state: (typeof rule.tokenName === 'string' ? rule.tokenName : undefined) || (typeof state === 'string' ? state : undefined) || ((_this$_getNextRule7 = this._getNextRule()) === null || _this$_getNextRule7 === void 0 ? void 0 : _this$_getNextRule7.state) || '',
step: typeof step === 'number' ? step : (((_this$_getNextRule8 = this._getNextRule()) === null || _this$_getNextRule8 === void 0 ? void 0 : _this$_getNextRule8.step) || 0) + 1
});
break;
case RuleKind.PEEK_CONSTRAINT:
rule = rule;
this.state.rules.push({
peek: rule.peek,
optional: Boolean(rule.optional),
butNot: rule.butNot,
eatNextOnFail: Boolean(rule.eatNextOnFail),
name: name || '',
depth: depth,
index: -1,
matched: false,
expanded: false,
kind: RuleKind.PEEK_CONSTRAINT,
state: (typeof state === 'string' ? state : undefined) || ((_this$_getNextRule9 = this._getNextRule()) === null || _this$_getNextRule9 === void 0 ? void 0 : _this$_getNextRule9.state) || '',
step: typeof step === 'number' ? step : (((_this$_getNextRule10 = this._getNextRule()) === null || _this$_getNextRule10 === void 0 ? void 0 : _this$_getNextRule10.step) || 0) + 1
});
break;
}
};
_proto._getRuleKind = function _getRuleKind(rule) {
if (Array.isArray(rule)) {
return RuleKind.CONSTRAINTS_SET;
}
if (rule.constraintsSet === true) {
return RuleKind.CONSTRAINTS_SET_ROOT;
}
if (typeof rule === 'string') {
return RuleKind.RULE_NAME;
}
if (Object.prototype.hasOwnProperty.call(rule, 'ofType')) {
return RuleKind.OF_TYPE_CONSTRAINT;
}
if (Object.prototype.hasOwnProperty.call(rule, 'listOfType')) {
return RuleKind.LIST_OF_TYPE_CONSTRAINT;
}
if (Object.prototype.hasOwnProperty.call(rule, 'peek')) {
return RuleKind.PEEK_CONSTRAINT;
}
if (Object.prototype.hasOwnProperty.call(rule, 'token')) {
return RuleKind.TOKEN_CONSTRAINT;
}
return RuleKind.INVALID;
};
_proto._advanceToken = function _advanceToken() {
return this._lexer.advance();
};
_proto._lookAhead = function _lookAhead() {
try {
return this._lexer.lookahead();
} catch (err) {
return {
kind: TokenKind.INVALID,
value: ''
};
}
};
return OnlineParser;
}();
exports.OnlineParser = OnlineParser;
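
_matchToken combines the value checks (ofValue, oneOf, token kind) with the butNot exclusion; FragmentName is the clearest user of the latter, accepting any Name except the keyword `on`. A simplified sketch follows (a hypothetical matches helper: it handles a single butNot object rather than the array form, and assumes the compiled grammar.js from this commit).

const grammar = require('./grammar.js').default;

function matches(token, rule) {
  if (rule.ofValue !== undefined && token.value !== rule.ofValue) return false;
  if (rule.oneOf !== undefined && !rule.oneOf.includes(token.value)) return false;
  if (token.kind !== rule.token) return false;
  // butNot re-runs the match against the excluded shape and inverts the result.
  if (rule.butNot && matches(token, rule.butNot)) return false;
  return true;
}

// FragmentName = a Name token butNot the keyword `on`, which is how the parser
// distinguishes `... friendFields` (fragment spread) from `... on Droid` (inline fragment).
console.log(matches({ kind: 'Name', value: 'friendFields' }, grammar.FragmentName)); // true
console.log(matches({ kind: 'Name', value: 'on' }, grammar.FragmentName));           // false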

View File

@@ -0,0 +1,723 @@
// @flow strict
import { Lexer } from '../lexer';
import { Source } from '../source';
import GraphQLGrammar from './grammar';
import type {
GraphQLGrammarRule,
GraphQLGrammarRuleName,
GraphQLGrammarRuleConstraint,
GraphQLGrammarTokenConstraint,
GraphQLGrammarOfTypeConstraint,
GraphQLGrammarListOfTypeConstraint,
GraphQLGrammarPeekConstraint,
GraphQLGrammarConstraintsSet,
} from './grammar';
export const TokenKind = {
NAME: 'Name',
INT: 'Int',
FLOAT: 'Float',
STRING: 'String',
BLOCK_STRING: 'BlockString',
COMMENT: 'Comment',
PUNCTUATION: 'Punctuation',
EOF: '<EOF>',
INVALID: 'Invalid',
};
export const RuleKind = {
TOKEN_CONSTRAINT: 'TokenConstraint',
OF_TYPE_CONSTRAINT: 'OfTypeConstraint',
LIST_OF_TYPE_CONSTRAINT: 'ListOfTypeConstraint',
PEEK_CONSTRAINT: 'PeekConstraint',
CONSTRAINTS_SET: 'ConstraintsSet',
CONSTRAINTS_SET_ROOT: 'ConstraintsSetRoot',
RULE_NAME: 'RuleName',
INVALID: 'Invalid',
};
interface BaseOnlineParserRule {
kind: string;
name?: string;
depth: number;
step: number;
expanded: boolean;
state: string;
optional?: boolean;
eatNextOnFail?: boolean;
}
interface TokenOnlineParserRule
extends BaseOnlineParserRule,
GraphQLGrammarTokenConstraint {}
interface OfTypeOnlineParserRule
extends BaseOnlineParserRule,
GraphQLGrammarOfTypeConstraint {}
interface ListOfTypeOnlineParserRule
extends BaseOnlineParserRule,
GraphQLGrammarListOfTypeConstraint {}
interface PeekOnlineParserRule
extends BaseOnlineParserRule,
GraphQLGrammarPeekConstraint {
index: number;
matched: boolean;
}
interface ConstraintsSetOnlineParserRule extends BaseOnlineParserRule {
constraintsSet: boolean;
constraints: GraphQLGrammarConstraintsSet;
}
type OnlineParserRule =
| TokenOnlineParserRule
| OfTypeOnlineParserRule
| ListOfTypeOnlineParserRule
| PeekOnlineParserRule
| ConstraintsSetOnlineParserRule;
export type OnlineParserState = {|
rules: Array<OnlineParserRule>,
kind: () => string,
step: () => number,
levels: Array<number>,
indentLevel: number,
name: string | null,
type: string | null,
|};
type Token = {|
kind: string,
value: string,
tokenName?: ?string,
ruleName?: ?string,
|};
type LexerToken = {|
kind: string,
value: ?string,
|};
type OnlineParserConfig = {|
tabSize: number,
|};
type OnlineParserConfigOption = {|
tabSize: ?number,
|};
export class OnlineParser {
state: OnlineParserState;
_lexer: Lexer;
_config: OnlineParserConfig;
constructor(
source: string,
state?: OnlineParserState,
config?: OnlineParserConfigOption,
) {
this.state = state || OnlineParser.startState();
this._config = {
tabSize: config?.tabSize ?? 2,
};
this._lexer = new Lexer(new Source(source));
}
static startState(): OnlineParserState {
return {
rules: [
// $FlowFixMe[cannot-spread-interface]
{
name: 'Document',
state: 'Document',
kind: 'ListOfTypeConstraint',
...GraphQLGrammar.Document,
expanded: false,
depth: 1,
step: 1,
},
],
name: null,
type: null,
levels: [],
indentLevel: 0,
kind(): string {
return this.rules[this.rules.length - 1]?.state || '';
},
step(): number {
return this.rules[this.rules.length - 1]?.step || 0;
},
};
}
static copyState(state: OnlineParserState): OnlineParserState {
return {
name: state.name,
type: state.type,
rules: JSON.parse(JSON.stringify(state.rules)),
levels: [...state.levels],
indentLevel: state.indentLevel,
kind(): string {
return this.rules[this.rules.length - 1]?.state || '';
},
step(): number {
return this.rules[this.rules.length - 1]?.step || 0;
},
};
}
sol(): boolean {
return (
this._lexer.source.locationOffset.line === 1 &&
this._lexer.source.locationOffset.column === 1
);
}
parseToken(): Token {
const rule = (this._getNextRule(): any);
if (this.sol()) {
this.state.indentLevel = Math.floor(
this.indentation() / this._config.tabSize,
);
}
if (!rule) {
return {
kind: TokenKind.INVALID,
value: '',
};
}
let token;
if (this._lookAhead().kind === '<EOF>') {
return {
kind: TokenKind.EOF,
value: '',
ruleName: rule.name,
};
}
switch (rule.kind) {
case RuleKind.TOKEN_CONSTRAINT:
token = this._parseTokenConstraint(rule);
break;
case RuleKind.LIST_OF_TYPE_CONSTRAINT:
token = this._parseListOfTypeConstraint(rule);
break;
case RuleKind.OF_TYPE_CONSTRAINT:
token = this._parseOfTypeConstraint(rule);
break;
case RuleKind.PEEK_CONSTRAINT:
token = this._parsePeekConstraint(rule);
break;
case RuleKind.CONSTRAINTS_SET_ROOT:
token = this._parseConstraintsSetRule(rule);
break;
default:
return {
kind: TokenKind.INVALID,
value: '',
ruleName: rule.name,
};
}
if (token && token.kind === TokenKind.INVALID) {
if (rule.optional === true) {
this.state.rules.pop();
} else {
this._rollbackRule();
}
return this.parseToken() || token;
}
return token;
}
indentation(): number {
const match = this._lexer.source.body.match(/\s*/);
let indent = 0;
if (match && match.length === 0) {
const whiteSpaces = match[0];
let pos = 0;
while (whiteSpaces.length > pos) {
if (whiteSpaces.charCodeAt(pos) === 9) {
indent += 2;
} else {
indent++;
}
pos++;
}
}
return indent;
}
_parseTokenConstraint(rule: TokenOnlineParserRule): Token {
rule.expanded = true;
const token = this._lookAhead();
if (!this._matchToken(token, rule)) {
return {
kind: TokenKind.INVALID,
value: '',
tokenName: rule.tokenName,
ruleName: rule.name,
};
}
this._advanceToken();
const parserToken = this._transformLexerToken(token, rule);
this._popMatchedRule(parserToken);
return parserToken;
}
_parseListOfTypeConstraint(rule: ListOfTypeOnlineParserRule): Token {
this._pushRule(
GraphQLGrammar[rule.listOfType],
rule.depth + 1,
rule.listOfType,
1,
rule.state,
);
rule.expanded = true;
const token = this.parseToken();
return token;
}
_parseOfTypeConstraint(rule: OfTypeOnlineParserRule): Token {
if (rule.expanded) {
this._popMatchedRule();
return this.parseToken();
}
this._pushRule(rule.ofType, rule.depth + 1, rule.tokenName, 1, rule.state);
rule.expanded = true;
const token = this.parseToken();
return token;
}
_parsePeekConstraint(rule: PeekOnlineParserRule): Token {
if (rule.expanded) {
this._popMatchedRule();
return this.parseToken();
}
while (!rule.matched && rule.index < rule.peek.length - 1) {
rule.index++;
const constraint = rule.peek[rule.index];
let { ifCondition } = constraint;
if (typeof ifCondition === 'string') {
ifCondition = GraphQLGrammar[ifCondition];
}
let token = this._lookAhead();
if (ifCondition && this._matchToken(token, ifCondition)) {
rule.matched = true;
rule.expanded = true;
this._pushRule(constraint.expect, rule.depth + 1, '', 1, rule.state);
token = this.parseToken();
return token;
}
}
return {
kind: TokenKind.INVALID,
value: '',
ruleName: rule.name,
};
}
_parseConstraintsSetRule(rule: ConstraintsSetOnlineParserRule): Token {
if (rule.expanded) {
this._popMatchedRule();
return this.parseToken();
}
for (let index = rule.constraints.length - 1; index >= 0; index--) {
this._pushRule(
rule.constraints[index],
rule.depth + 1,
'',
index,
rule.state,
);
}
rule.expanded = true;
return this.parseToken();
}
_matchToken(
token: Token | LexerToken,
rule: GraphQLGrammarTokenConstraint,
): boolean {
if (typeof token.value === 'string') {
if (
(typeof rule.ofValue === 'string' && token.value !== rule.ofValue) ||
(Array.isArray(rule.oneOf) && !rule.oneOf.includes(token.value)) ||
(typeof rule.ofValue !== 'string' &&
!Array.isArray(rule.oneOf) &&
token.kind !== rule.token)
) {
return false;
}
return this._butNot(token, rule);
}
if (token.kind !== rule.token) {
return false;
}
return this._butNot(token, rule);
}
_butNot(
token: Token | LexerToken,
rule: GraphQLGrammarRuleConstraint,
): boolean {
if (rule.butNot) {
if (Array.isArray(rule.butNot)) {
if (
rule.butNot.reduce(
(matched, constraint) =>
matched || this._matchToken(token, constraint),
false,
)
) {
return false;
}
return true;
}
return !this._matchToken(token, rule.butNot);
}
return true;
}
_transformLexerToken(lexerToken: LexerToken, rule: any): Token {
let token;
const ruleName = rule.name || '';
const tokenName = rule.tokenName || '';
if (lexerToken.kind === '<EOF>' || lexerToken.value !== undefined) {
token = {
kind: lexerToken.kind,
value: lexerToken.value || '',
tokenName,
ruleName,
};
if (token.kind === TokenKind.STRING) {
token.value = `"${token.value}"`;
} else if (token.kind === TokenKind.BLOCK_STRING) {
token.value = `"""${token.value}"""`;
}
} else {
token = {
kind: TokenKind.PUNCTUATION,
value: lexerToken.kind,
tokenName,
ruleName,
};
if (/^[{([]/.test(token.value)) {
if (this.state.indentLevel !== undefined) {
this.state.levels = this.state.levels.concat(
this.state.indentLevel + 1,
);
}
} else if (/^[})\]]/.test(token.value)) {
this.state.levels.pop();
}
}
return token;
}
_getNextRule(): OnlineParserRule | null {
return this.state.rules[this.state.rules.length - 1] || null;
}
_popMatchedRule(token: ?Token) {
const rule = this.state.rules.pop();
if (!rule) {
return;
}
if (token && rule.kind === RuleKind.TOKEN_CONSTRAINT) {
const constraint = rule;
if (typeof constraint.definitionName === 'string') {
this.state.name = token.value || null;
} else if (typeof constraint.typeName === 'string') {
this.state.type = token.value || null;
}
}
const nextRule = this._getNextRule();
if (!nextRule) {
return;
}
if (
nextRule.depth === rule.depth - 1 &&
nextRule.expanded &&
nextRule.kind === RuleKind.CONSTRAINTS_SET_ROOT
) {
this.state.rules.pop();
}
if (
nextRule.depth === rule.depth - 1 &&
nextRule.expanded &&
nextRule.kind === RuleKind.LIST_OF_TYPE_CONSTRAINT
) {
nextRule.expanded = false;
nextRule.optional = true;
}
}
_rollbackRule() {
if (!this.state.rules.length) {
return;
}
const popRule = () => {
const lastPoppedRule = this.state.rules.pop();
if (lastPoppedRule.eatNextOnFail === true) {
this.state.rules.pop();
}
};
const poppedRule = this.state.rules.pop();
if (!poppedRule) {
return;
}
let popped = 0;
let nextRule = this._getNextRule();
while (
nextRule &&
(poppedRule.kind !== RuleKind.LIST_OF_TYPE_CONSTRAINT ||
nextRule.expanded) &&
nextRule.depth > poppedRule.depth - 1
) {
this.state.rules.pop();
popped++;
nextRule = this._getNextRule();
}
if (nextRule && nextRule.expanded) {
if (nextRule.optional === true) {
popRule();
} else {
if (
nextRule.kind === RuleKind.LIST_OF_TYPE_CONSTRAINT &&
popped === 1
) {
this.state.rules.pop();
return;
}
this._rollbackRule();
}
}
}
_pushRule(
baseRule: any,
depth: number,
name?: string,
step?: number,
state?: string,
) {
this.state.name = null;
this.state.type = null;
let rule = baseRule;
switch (this._getRuleKind(rule)) {
case RuleKind.RULE_NAME:
rule = (rule: GraphQLGrammarRuleName);
this._pushRule(
GraphQLGrammar[rule],
depth,
(typeof name === 'string' ? name : undefined) || rule,
step,
state,
);
break;
case RuleKind.CONSTRAINTS_SET:
rule = (rule: GraphQLGrammarConstraintsSet);
this.state.rules.push({
name: name || '',
depth,
expanded: false,
constraints: rule,
constraintsSet: true,
kind: RuleKind.CONSTRAINTS_SET_ROOT,
state:
(typeof name === 'string' ? name : undefined) ||
(typeof state === 'string' ? state : undefined) ||
this._getNextRule()?.state ||
'',
step:
typeof step === 'number'
? step
: (this._getNextRule()?.step || 0) + 1,
});
break;
case RuleKind.OF_TYPE_CONSTRAINT:
rule = (rule: GraphQLGrammarOfTypeConstraint);
this.state.rules.push({
name: name || '',
ofType: rule.ofType,
optional: Boolean(rule.optional),
butNot: rule.butNot,
eatNextOnFail: Boolean(rule.eatNextOnFail),
depth,
expanded: false,
kind: RuleKind.OF_TYPE_CONSTRAINT,
state:
(typeof rule.tokenName === 'string' ? rule.tokenName : undefined) ||
(typeof name === 'string' ? name : undefined) ||
(typeof state === 'string' ? state : undefined) ||
this._getNextRule()?.state ||
'',
step:
typeof step === 'number'
? step
: (this._getNextRule()?.step || 0) + 1,
});
break;
case RuleKind.LIST_OF_TYPE_CONSTRAINT:
rule = (rule: GraphQLGrammarListOfTypeConstraint);
this.state.rules.push({
listOfType: rule.listOfType,
optional: Boolean(rule.optional),
butNot: rule.butNot,
eatNextOnFail: Boolean(rule.eatNextOnFail),
name: name || '',
depth,
expanded: false,
kind: RuleKind.LIST_OF_TYPE_CONSTRAINT,
state:
(typeof name === 'string' ? name : undefined) ||
(typeof state === 'string' ? state : undefined) ||
this._getNextRule()?.state ||
'',
step:
typeof step === 'number'
? step
: (this._getNextRule()?.step || 0) + 1,
});
break;
case RuleKind.TOKEN_CONSTRAINT:
rule = (rule: GraphQLGrammarTokenConstraint);
this.state.rules.push({
token: rule.token,
ofValue: rule.ofValue,
oneOf: rule.oneOf,
definitionName: Boolean(rule.definitionName),
typeName: Boolean(rule.typeName),
optional: Boolean(rule.optional),
butNot: rule.butNot,
eatNextOnFail: Boolean(rule.eatNextOnFail),
name: name || '',
depth,
expanded: false,
kind: RuleKind.TOKEN_CONSTRAINT,
state:
(typeof rule.tokenName === 'string' ? rule.tokenName : undefined) ||
(typeof state === 'string' ? state : undefined) ||
this._getNextRule()?.state ||
'',
step:
typeof step === 'number'
? step
: (this._getNextRule()?.step || 0) + 1,
});
break;
case RuleKind.PEEK_CONSTRAINT:
rule = (rule: GraphQLGrammarPeekConstraint);
this.state.rules.push({
peek: rule.peek,
optional: Boolean(rule.optional),
butNot: rule.butNot,
eatNextOnFail: Boolean(rule.eatNextOnFail),
name: name || '',
depth,
index: -1,
matched: false,
expanded: false,
kind: RuleKind.PEEK_CONSTRAINT,
state:
(typeof state === 'string' ? state : undefined) ||
this._getNextRule()?.state ||
'',
step:
typeof step === 'number'
? step
: (this._getNextRule()?.step || 0) + 1,
});
break;
}
}
_getRuleKind(rule: GraphQLGrammarRule | OnlineParserRule): string {
if (Array.isArray(rule)) {
return RuleKind.CONSTRAINTS_SET;
}
if (rule.constraintsSet === true) {
return RuleKind.CONSTRAINTS_SET_ROOT;
}
if (typeof rule === 'string') {
return RuleKind.RULE_NAME;
}
if (Object.prototype.hasOwnProperty.call(rule, 'ofType')) {
return RuleKind.OF_TYPE_CONSTRAINT;
}
if (Object.prototype.hasOwnProperty.call(rule, 'listOfType')) {
return RuleKind.LIST_OF_TYPE_CONSTRAINT;
}
if (Object.prototype.hasOwnProperty.call(rule, 'peek')) {
return RuleKind.PEEK_CONSTRAINT;
}
if (Object.prototype.hasOwnProperty.call(rule, 'token')) {
return RuleKind.TOKEN_CONSTRAINT;
}
return RuleKind.INVALID;
}
_advanceToken(): LexerToken {
return (this._lexer.advance(): any);
}
_lookAhead(): LexerToken {
try {
return (this._lexer.lookahead(): any);
} catch (err) {
return { kind: TokenKind.INVALID, value: '' };
}
}
}
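
The parser state is a stack of expanded rules plus two accessors that read its top, so a fresh state reports the Document rule at step 1. A small sketch, assuming the source above has been compiled (e.g. by Babel) and is importable as './onlineParser':

import { OnlineParser } from './onlineParser';

const state = OnlineParser.startState();
console.log(state.rules.length, state.rules[0].name); // 1 'Document'
console.log(state.kind(), state.step());              // 'Document' 1

// copyState deep-clones the rule stack, so mutating the copy (as parseToken does
// while consuming tokens) leaves the original snapshot untouched.
const copy = OnlineParser.copyState(state);
copy.rules.pop();
console.log(copy.rules.length, state.rules.length);   // 0 1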

View File

@@ -0,0 +1,587 @@
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
import { Lexer } from "../lexer.mjs";
import { Source } from "../source.mjs";
import GraphQLGrammar from "./grammar.mjs";
export var TokenKind = {
NAME: 'Name',
INT: 'Int',
FLOAT: 'Float',
STRING: 'String',
BLOCK_STRING: 'BlockString',
COMMENT: 'Comment',
PUNCTUATION: 'Punctuation',
EOF: '<EOF>',
INVALID: 'Invalid'
};
export var RuleKind = {
TOKEN_CONSTRAINT: 'TokenConstraint',
OF_TYPE_CONSTRAINT: 'OfTypeConstraint',
LIST_OF_TYPE_CONSTRAINT: 'ListOfTypeConstraint',
PEEK_CONSTRAINT: 'PeekConstraint',
CONSTRAINTS_SET: 'ConstraintsSet',
CONSTRAINTS_SET_ROOT: 'ConstraintsSetRoot',
RULE_NAME: 'RuleName',
INVALID: 'Invalid'
};
export var OnlineParser = /*#__PURE__*/function () {
function OnlineParser(source, state, config) {
var _config$tabSize;
this.state = state || OnlineParser.startState();
this._config = {
tabSize: (_config$tabSize = config === null || config === void 0 ? void 0 : config.tabSize) !== null && _config$tabSize !== void 0 ? _config$tabSize : 2
};
this._lexer = new Lexer(new Source(source));
}
OnlineParser.startState = function startState() {
return {
rules: [// $FlowFixMe[cannot-spread-interface]
_objectSpread(_objectSpread({
name: 'Document',
state: 'Document',
kind: 'ListOfTypeConstraint'
}, GraphQLGrammar.Document), {}, {
expanded: false,
depth: 1,
step: 1
})],
name: null,
type: null,
levels: [],
indentLevel: 0,
kind: function kind() {
var _this$rules;
return ((_this$rules = this.rules[this.rules.length - 1]) === null || _this$rules === void 0 ? void 0 : _this$rules.state) || '';
},
step: function step() {
var _this$rules2;
return ((_this$rules2 = this.rules[this.rules.length - 1]) === null || _this$rules2 === void 0 ? void 0 : _this$rules2.step) || 0;
}
};
};
OnlineParser.copyState = function copyState(state) {
return {
name: state.name,
type: state.type,
rules: JSON.parse(JSON.stringify(state.rules)),
levels: [].concat(state.levels),
indentLevel: state.indentLevel,
kind: function kind() {
var _this$rules3;
return ((_this$rules3 = this.rules[this.rules.length - 1]) === null || _this$rules3 === void 0 ? void 0 : _this$rules3.state) || '';
},
step: function step() {
var _this$rules4;
return ((_this$rules4 = this.rules[this.rules.length - 1]) === null || _this$rules4 === void 0 ? void 0 : _this$rules4.step) || 0;
}
};
};
var _proto = OnlineParser.prototype;
_proto.sol = function sol() {
return this._lexer.source.locationOffset.line === 1 && this._lexer.source.locationOffset.column === 1;
};
_proto.parseToken = function parseToken() {
var rule = this._getNextRule();
if (this.sol()) {
this.state.indentLevel = Math.floor(this.indentation() / this._config.tabSize);
}
if (!rule) {
return {
kind: TokenKind.INVALID,
value: ''
};
}
var token;
if (this._lookAhead().kind === '<EOF>') {
return {
kind: TokenKind.EOF,
value: '',
ruleName: rule.name
};
}
switch (rule.kind) {
case RuleKind.TOKEN_CONSTRAINT:
token = this._parseTokenConstraint(rule);
break;
case RuleKind.LIST_OF_TYPE_CONSTRAINT:
token = this._parseListOfTypeConstraint(rule);
break;
case RuleKind.OF_TYPE_CONSTRAINT:
token = this._parseOfTypeConstraint(rule);
break;
case RuleKind.PEEK_CONSTRAINT:
token = this._parsePeekConstraint(rule);
break;
case RuleKind.CONSTRAINTS_SET_ROOT:
token = this._parseConstraintsSetRule(rule);
break;
default:
return {
kind: TokenKind.INVALID,
value: '',
ruleName: rule.name
};
}
if (token && token.kind === TokenKind.INVALID) {
if (rule.optional === true) {
this.state.rules.pop();
} else {
this._rollbackRule();
}
return this.parseToken() || token;
}
return token;
};
_proto.indentation = function indentation() {
var match = this._lexer.source.body.match(/\s*/);
var indent = 0;
    // Note: /\s*/ always matches, so the original `match.length === 0` check was
    // never true; test the matched text instead so the loop below can run.
    if (match && match[0].length > 0) {
var whiteSpaces = match[0];
var pos = 0;
while (whiteSpaces.length > pos) {
if (whiteSpaces.charCodeAt(pos) === 9) {
indent += 2;
} else {
indent++;
}
pos++;
}
}
return indent;
};
_proto._parseTokenConstraint = function _parseTokenConstraint(rule) {
rule.expanded = true;
var token = this._lookAhead();
if (!this._matchToken(token, rule)) {
return {
kind: TokenKind.INVALID,
value: '',
tokenName: rule.tokenName,
ruleName: rule.name
};
}
this._advanceToken();
var parserToken = this._transformLexerToken(token, rule);
this._popMatchedRule(parserToken);
return parserToken;
};
_proto._parseListOfTypeConstraint = function _parseListOfTypeConstraint(rule) {
this._pushRule(GraphQLGrammar[rule.listOfType], rule.depth + 1, rule.listOfType, 1, rule.state);
rule.expanded = true;
var token = this.parseToken();
return token;
};
_proto._parseOfTypeConstraint = function _parseOfTypeConstraint(rule) {
if (rule.expanded) {
this._popMatchedRule();
return this.parseToken();
}
this._pushRule(rule.ofType, rule.depth + 1, rule.tokenName, 1, rule.state);
rule.expanded = true;
var token = this.parseToken();
return token;
};
_proto._parsePeekConstraint = function _parsePeekConstraint(rule) {
if (rule.expanded) {
this._popMatchedRule();
return this.parseToken();
}
while (!rule.matched && rule.index < rule.peek.length - 1) {
rule.index++;
var constraint = rule.peek[rule.index];
var ifCondition = constraint.ifCondition;
if (typeof ifCondition === 'string') {
ifCondition = GraphQLGrammar[ifCondition];
}
var token = this._lookAhead();
if (ifCondition && this._matchToken(token, ifCondition)) {
rule.matched = true;
rule.expanded = true;
this._pushRule(constraint.expect, rule.depth + 1, '', 1, rule.state);
token = this.parseToken();
return token;
}
}
return {
kind: TokenKind.INVALID,
value: '',
ruleName: rule.name
};
};
_proto._parseConstraintsSetRule = function _parseConstraintsSetRule(rule) {
if (rule.expanded) {
this._popMatchedRule();
return this.parseToken();
}
for (var index = rule.constraints.length - 1; index >= 0; index--) {
this._pushRule(rule.constraints[index], rule.depth + 1, '', index, rule.state);
}
rule.expanded = true;
return this.parseToken();
};
_proto._matchToken = function _matchToken(token, rule) {
if (typeof token.value === 'string') {
if (typeof rule.ofValue === 'string' && token.value !== rule.ofValue || Array.isArray(rule.oneOf) && !rule.oneOf.includes(token.value) || typeof rule.ofValue !== 'string' && !Array.isArray(rule.oneOf) && token.kind !== rule.token) {
return false;
}
return this._butNot(token, rule);
}
if (token.kind !== rule.token) {
return false;
}
return this._butNot(token, rule);
};
_proto._butNot = function _butNot(token, rule) {
var _this = this;
if (rule.butNot) {
if (Array.isArray(rule.butNot)) {
if (rule.butNot.reduce(function (matched, constraint) {
return matched || _this._matchToken(token, constraint);
}, false)) {
return false;
}
return true;
}
return !this._matchToken(token, rule.butNot);
}
return true;
};
_proto._transformLexerToken = function _transformLexerToken(lexerToken, rule) {
var token;
var ruleName = rule.name || '';
var tokenName = rule.tokenName || '';
if (lexerToken.kind === '<EOF>' || lexerToken.value !== undefined) {
token = {
kind: lexerToken.kind,
value: lexerToken.value || '',
tokenName: tokenName,
ruleName: ruleName
};
if (token.kind === TokenKind.STRING) {
token.value = "\"".concat(token.value, "\"");
} else if (token.kind === TokenKind.BLOCK_STRING) {
token.value = "\"\"\"".concat(token.value, "\"\"\"");
}
} else {
token = {
kind: TokenKind.PUNCTUATION,
value: lexerToken.kind,
tokenName: tokenName,
ruleName: ruleName
};
if (/^[{([]/.test(token.value)) {
if (this.state.indentLevel !== undefined) {
this.state.levels = this.state.levels.concat(this.state.indentLevel + 1);
}
} else if (/^[})\]]/.test(token.value)) {
this.state.levels.pop();
}
}
return token;
};
_proto._getNextRule = function _getNextRule() {
return this.state.rules[this.state.rules.length - 1] || null;
};
_proto._popMatchedRule = function _popMatchedRule(token) {
var rule = this.state.rules.pop();
if (!rule) {
return;
}
if (token && rule.kind === RuleKind.TOKEN_CONSTRAINT) {
var constraint = rule;
if (typeof constraint.definitionName === 'string') {
this.state.name = token.value || null;
} else if (typeof constraint.typeName === 'string') {
this.state.type = token.value || null;
}
}
var nextRule = this._getNextRule();
if (!nextRule) {
return;
}
if (nextRule.depth === rule.depth - 1 && nextRule.expanded && nextRule.kind === RuleKind.CONSTRAINTS_SET_ROOT) {
this.state.rules.pop();
}
if (nextRule.depth === rule.depth - 1 && nextRule.expanded && nextRule.kind === RuleKind.LIST_OF_TYPE_CONSTRAINT) {
nextRule.expanded = false;
nextRule.optional = true;
}
};
_proto._rollbackRule = function _rollbackRule() {
var _this2 = this;
if (!this.state.rules.length) {
return;
}
var popRule = function popRule() {
var lastPoppedRule = _this2.state.rules.pop();
if (lastPoppedRule.eatNextOnFail === true) {
_this2.state.rules.pop();
}
};
var poppedRule = this.state.rules.pop();
if (!poppedRule) {
return;
}
var popped = 0;
var nextRule = this._getNextRule();
while (nextRule && (poppedRule.kind !== RuleKind.LIST_OF_TYPE_CONSTRAINT || nextRule.expanded) && nextRule.depth > poppedRule.depth - 1) {
this.state.rules.pop();
popped++;
nextRule = this._getNextRule();
}
if (nextRule && nextRule.expanded) {
if (nextRule.optional === true) {
popRule();
} else {
if (nextRule.kind === RuleKind.LIST_OF_TYPE_CONSTRAINT && popped === 1) {
this.state.rules.pop();
return;
}
this._rollbackRule();
}
}
};
_proto._pushRule = function _pushRule(baseRule, depth, name, step, state) {
var _this$_getNextRule, _this$_getNextRule2, _this$_getNextRule3, _this$_getNextRule4, _this$_getNextRule5, _this$_getNextRule6, _this$_getNextRule7, _this$_getNextRule8, _this$_getNextRule9, _this$_getNextRule10;
this.state.name = null;
this.state.type = null;
var rule = baseRule;
switch (this._getRuleKind(rule)) {
case RuleKind.RULE_NAME:
rule = rule;
this._pushRule(GraphQLGrammar[rule], depth, (typeof name === 'string' ? name : undefined) || rule, step, state);
break;
case RuleKind.CONSTRAINTS_SET:
rule = rule;
this.state.rules.push({
name: name || '',
depth: depth,
expanded: false,
constraints: rule,
constraintsSet: true,
kind: RuleKind.CONSTRAINTS_SET_ROOT,
state: (typeof name === 'string' ? name : undefined) || (typeof state === 'string' ? state : undefined) || ((_this$_getNextRule = this._getNextRule()) === null || _this$_getNextRule === void 0 ? void 0 : _this$_getNextRule.state) || '',
step: typeof step === 'number' ? step : (((_this$_getNextRule2 = this._getNextRule()) === null || _this$_getNextRule2 === void 0 ? void 0 : _this$_getNextRule2.step) || 0) + 1
});
break;
case RuleKind.OF_TYPE_CONSTRAINT:
rule = rule;
this.state.rules.push({
name: name || '',
ofType: rule.ofType,
optional: Boolean(rule.optional),
butNot: rule.butNot,
eatNextOnFail: Boolean(rule.eatNextOnFail),
depth: depth,
expanded: false,
kind: RuleKind.OF_TYPE_CONSTRAINT,
state: (typeof rule.tokenName === 'string' ? rule.tokenName : undefined) || (typeof name === 'string' ? name : undefined) || (typeof state === 'string' ? state : undefined) || ((_this$_getNextRule3 = this._getNextRule()) === null || _this$_getNextRule3 === void 0 ? void 0 : _this$_getNextRule3.state) || '',
step: typeof step === 'number' ? step : (((_this$_getNextRule4 = this._getNextRule()) === null || _this$_getNextRule4 === void 0 ? void 0 : _this$_getNextRule4.step) || 0) + 1
});
break;
case RuleKind.LIST_OF_TYPE_CONSTRAINT:
rule = rule;
this.state.rules.push({
listOfType: rule.listOfType,
optional: Boolean(rule.optional),
butNot: rule.butNot,
eatNextOnFail: Boolean(rule.eatNextOnFail),
name: name || '',
depth: depth,
expanded: false,
kind: RuleKind.LIST_OF_TYPE_CONSTRAINT,
state: (typeof name === 'string' ? name : undefined) || (typeof state === 'string' ? state : undefined) || ((_this$_getNextRule5 = this._getNextRule()) === null || _this$_getNextRule5 === void 0 ? void 0 : _this$_getNextRule5.state) || '',
step: typeof step === 'number' ? step : (((_this$_getNextRule6 = this._getNextRule()) === null || _this$_getNextRule6 === void 0 ? void 0 : _this$_getNextRule6.step) || 0) + 1
});
break;
case RuleKind.TOKEN_CONSTRAINT:
rule = rule;
this.state.rules.push({
token: rule.token,
ofValue: rule.ofValue,
oneOf: rule.oneOf,
definitionName: Boolean(rule.definitionName),
typeName: Boolean(rule.typeName),
optional: Boolean(rule.optional),
butNot: rule.butNot,
eatNextOnFail: Boolean(rule.eatNextOnFail),
name: name || '',
depth: depth,
expanded: false,
kind: RuleKind.TOKEN_CONSTRAINT,
state: (typeof rule.tokenName === 'string' ? rule.tokenName : undefined) || (typeof state === 'string' ? state : undefined) || ((_this$_getNextRule7 = this._getNextRule()) === null || _this$_getNextRule7 === void 0 ? void 0 : _this$_getNextRule7.state) || '',
step: typeof step === 'number' ? step : (((_this$_getNextRule8 = this._getNextRule()) === null || _this$_getNextRule8 === void 0 ? void 0 : _this$_getNextRule8.step) || 0) + 1
});
break;
case RuleKind.PEEK_CONSTRAINT:
rule = rule;
this.state.rules.push({
peek: rule.peek,
optional: Boolean(rule.optional),
butNot: rule.butNot,
eatNextOnFail: Boolean(rule.eatNextOnFail),
name: name || '',
depth: depth,
index: -1,
matched: false,
expanded: false,
kind: RuleKind.PEEK_CONSTRAINT,
state: (typeof state === 'string' ? state : undefined) || ((_this$_getNextRule9 = this._getNextRule()) === null || _this$_getNextRule9 === void 0 ? void 0 : _this$_getNextRule9.state) || '',
step: typeof step === 'number' ? step : (((_this$_getNextRule10 = this._getNextRule()) === null || _this$_getNextRule10 === void 0 ? void 0 : _this$_getNextRule10.step) || 0) + 1
});
break;
}
};
_proto._getRuleKind = function _getRuleKind(rule) {
if (Array.isArray(rule)) {
return RuleKind.CONSTRAINTS_SET;
}
if (rule.constraintsSet === true) {
return RuleKind.CONSTRAINTS_SET_ROOT;
}
if (typeof rule === 'string') {
return RuleKind.RULE_NAME;
}
if (Object.prototype.hasOwnProperty.call(rule, 'ofType')) {
return RuleKind.OF_TYPE_CONSTRAINT;
}
if (Object.prototype.hasOwnProperty.call(rule, 'listOfType')) {
return RuleKind.LIST_OF_TYPE_CONSTRAINT;
}
if (Object.prototype.hasOwnProperty.call(rule, 'peek')) {
return RuleKind.PEEK_CONSTRAINT;
}
if (Object.prototype.hasOwnProperty.call(rule, 'token')) {
return RuleKind.TOKEN_CONSTRAINT;
}
return RuleKind.INVALID;
};
_proto._advanceToken = function _advanceToken() {
return this._lexer.advance();
};
_proto._lookAhead = function _lookAhead() {
try {
return this._lexer.lookahead();
} catch (err) {
return {
kind: TokenKind.INVALID,
value: ''
};
}
};
return OnlineParser;
}();
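// Usage sketch (illustrative only, not part of the original module). It assumes
// this file is importable as "./onlineParser.mjs"; the helper name `printTokens`
// is invented for the example.
//
//   import { OnlineParser, TokenKind } from "./onlineParser.mjs";
//
//   function printTokens(source) {
//     var parser = new OnlineParser(source, OnlineParser.startState(), { tabSize: 2 });
//     var token = parser.parseToken();
//     while (token.kind !== TokenKind.EOF && token.kind !== TokenKind.INVALID) {
//       console.log(token.kind, token.value, token.ruleName);
//       token = parser.parseToken();
//     }
//   }
//
//   printTokens('query { viewer { id } }');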

View File

@@ -0,0 +1,96 @@
export { Source } from './source';
export { getLocation, SourceLocation } from './location';
export { printLocation, printSourceLocation } from './printLocation';
export { Kind, KindEnum } from './kinds';
export { TokenKind, TokenKindEnum } from './tokenKind';
export { Lexer } from './lexer';
export { parse, parseValue, parseType, ParseOptions } from './parser';
export { print } from './printer';
export {
visit,
visitInParallel,
getVisitFn,
BREAK,
ASTVisitor,
Visitor,
VisitFn,
VisitorKeyMap,
ASTVisitorKeyMap,
} from './visitor';
export {
Location,
Token,
ASTNode,
ASTKindToNode,
// Each kind of AST node
NameNode,
DocumentNode,
DefinitionNode,
ExecutableDefinitionNode,
OperationDefinitionNode,
OperationTypeNode,
VariableDefinitionNode,
VariableNode,
SelectionSetNode,
SelectionNode,
FieldNode,
ArgumentNode,
FragmentSpreadNode,
InlineFragmentNode,
FragmentDefinitionNode,
ValueNode,
IntValueNode,
FloatValueNode,
StringValueNode,
BooleanValueNode,
NullValueNode,
EnumValueNode,
ListValueNode,
ObjectValueNode,
ObjectFieldNode,
DirectiveNode,
TypeNode,
NamedTypeNode,
ListTypeNode,
NonNullTypeNode,
TypeSystemDefinitionNode,
SchemaDefinitionNode,
OperationTypeDefinitionNode,
TypeDefinitionNode,
ScalarTypeDefinitionNode,
ObjectTypeDefinitionNode,
FieldDefinitionNode,
InputValueDefinitionNode,
InterfaceTypeDefinitionNode,
UnionTypeDefinitionNode,
EnumTypeDefinitionNode,
EnumValueDefinitionNode,
InputObjectTypeDefinitionNode,
DirectiveDefinitionNode,
TypeSystemExtensionNode,
SchemaExtensionNode,
TypeExtensionNode,
ScalarTypeExtensionNode,
ObjectTypeExtensionNode,
InterfaceTypeExtensionNode,
UnionTypeExtensionNode,
EnumTypeExtensionNode,
InputObjectTypeExtensionNode,
} from './ast';
export {
isDefinitionNode,
isExecutableDefinitionNode,
isSelectionNode,
isValueNode,
isTypeNode,
isTypeSystemDefinitionNode,
isTypeDefinitionNode,
isTypeSystemExtensionNode,
isTypeExtensionNode,
} from './predicates';
export { DirectiveLocation, DirectiveLocationEnum } from './directiveLocation';
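// Illustrative usage of the primary entry points re-exported above (a sketch,
// assuming this directory is consumed as the graphql-js `language` module):
//
//   import { parse, print, visit } from './index';
//
//   const ast = parse('{ viewer { id } }');
//   visit(ast, {
//     Field(node) {
//       console.log(node.name.value); // "viewer", then "id"
//     },
//   });
//   console.log(print(ast)); // the query, re-printed from the AST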

View File

@@ -0,0 +1,191 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "Source", {
enumerable: true,
get: function get() {
return _source.Source;
}
});
Object.defineProperty(exports, "getLocation", {
enumerable: true,
get: function get() {
return _location.getLocation;
}
});
Object.defineProperty(exports, "printLocation", {
enumerable: true,
get: function get() {
return _printLocation.printLocation;
}
});
Object.defineProperty(exports, "printSourceLocation", {
enumerable: true,
get: function get() {
return _printLocation.printSourceLocation;
}
});
Object.defineProperty(exports, "Kind", {
enumerable: true,
get: function get() {
return _kinds.Kind;
}
});
Object.defineProperty(exports, "TokenKind", {
enumerable: true,
get: function get() {
return _tokenKind.TokenKind;
}
});
Object.defineProperty(exports, "Lexer", {
enumerable: true,
get: function get() {
return _lexer.Lexer;
}
});
Object.defineProperty(exports, "parse", {
enumerable: true,
get: function get() {
return _parser.parse;
}
});
Object.defineProperty(exports, "parseValue", {
enumerable: true,
get: function get() {
return _parser.parseValue;
}
});
Object.defineProperty(exports, "parseType", {
enumerable: true,
get: function get() {
return _parser.parseType;
}
});
Object.defineProperty(exports, "print", {
enumerable: true,
get: function get() {
return _printer.print;
}
});
Object.defineProperty(exports, "visit", {
enumerable: true,
get: function get() {
return _visitor.visit;
}
});
Object.defineProperty(exports, "visitInParallel", {
enumerable: true,
get: function get() {
return _visitor.visitInParallel;
}
});
Object.defineProperty(exports, "getVisitFn", {
enumerable: true,
get: function get() {
return _visitor.getVisitFn;
}
});
Object.defineProperty(exports, "BREAK", {
enumerable: true,
get: function get() {
return _visitor.BREAK;
}
});
Object.defineProperty(exports, "Location", {
enumerable: true,
get: function get() {
return _ast.Location;
}
});
Object.defineProperty(exports, "Token", {
enumerable: true,
get: function get() {
return _ast.Token;
}
});
Object.defineProperty(exports, "isDefinitionNode", {
enumerable: true,
get: function get() {
return _predicates.isDefinitionNode;
}
});
Object.defineProperty(exports, "isExecutableDefinitionNode", {
enumerable: true,
get: function get() {
return _predicates.isExecutableDefinitionNode;
}
});
Object.defineProperty(exports, "isSelectionNode", {
enumerable: true,
get: function get() {
return _predicates.isSelectionNode;
}
});
Object.defineProperty(exports, "isValueNode", {
enumerable: true,
get: function get() {
return _predicates.isValueNode;
}
});
Object.defineProperty(exports, "isTypeNode", {
enumerable: true,
get: function get() {
return _predicates.isTypeNode;
}
});
Object.defineProperty(exports, "isTypeSystemDefinitionNode", {
enumerable: true,
get: function get() {
return _predicates.isTypeSystemDefinitionNode;
}
});
Object.defineProperty(exports, "isTypeDefinitionNode", {
enumerable: true,
get: function get() {
return _predicates.isTypeDefinitionNode;
}
});
Object.defineProperty(exports, "isTypeSystemExtensionNode", {
enumerable: true,
get: function get() {
return _predicates.isTypeSystemExtensionNode;
}
});
Object.defineProperty(exports, "isTypeExtensionNode", {
enumerable: true,
get: function get() {
return _predicates.isTypeExtensionNode;
}
});
Object.defineProperty(exports, "DirectiveLocation", {
enumerable: true,
get: function get() {
return _directiveLocation.DirectiveLocation;
}
});
var _source = require("./source.js");
var _location = require("./location.js");
var _printLocation = require("./printLocation.js");
var _kinds = require("./kinds.js");
var _tokenKind = require("./tokenKind.js");
var _lexer = require("./lexer.js");
var _parser = require("./parser.js");
var _printer = require("./printer.js");
var _visitor = require("./visitor.js");
var _ast = require("./ast.js");
var _predicates = require("./predicates.js");
var _directiveLocation = require("./directiveLocation.js");

View File

@@ -0,0 +1,98 @@
// @flow strict
export { Source } from './source';
export { getLocation } from './location';
export type { SourceLocation } from './location';
export { printLocation, printSourceLocation } from './printLocation';
export { Kind } from './kinds';
export type { KindEnum } from './kinds';
export { TokenKind } from './tokenKind';
export type { TokenKindEnum } from './tokenKind';
export { Lexer } from './lexer';
export { parse, parseValue, parseType } from './parser';
export type { ParseOptions } from './parser';
export { print } from './printer';
export { visit, visitInParallel, getVisitFn, BREAK } from './visitor';
export type { ASTVisitor, Visitor, VisitFn, VisitorKeyMap } from './visitor';
export { Location, Token } from './ast';
export type {
ASTNode,
ASTKindToNode,
// Each kind of AST node
NameNode,
DocumentNode,
DefinitionNode,
ExecutableDefinitionNode,
OperationDefinitionNode,
OperationTypeNode,
VariableDefinitionNode,
VariableNode,
SelectionSetNode,
SelectionNode,
FieldNode,
ArgumentNode,
FragmentSpreadNode,
InlineFragmentNode,
FragmentDefinitionNode,
ValueNode,
IntValueNode,
FloatValueNode,
StringValueNode,
BooleanValueNode,
NullValueNode,
EnumValueNode,
ListValueNode,
ObjectValueNode,
ObjectFieldNode,
DirectiveNode,
TypeNode,
NamedTypeNode,
ListTypeNode,
NonNullTypeNode,
TypeSystemDefinitionNode,
SchemaDefinitionNode,
OperationTypeDefinitionNode,
TypeDefinitionNode,
ScalarTypeDefinitionNode,
ObjectTypeDefinitionNode,
FieldDefinitionNode,
InputValueDefinitionNode,
InterfaceTypeDefinitionNode,
UnionTypeDefinitionNode,
EnumTypeDefinitionNode,
EnumValueDefinitionNode,
InputObjectTypeDefinitionNode,
DirectiveDefinitionNode,
TypeSystemExtensionNode,
SchemaExtensionNode,
TypeExtensionNode,
ScalarTypeExtensionNode,
ObjectTypeExtensionNode,
InterfaceTypeExtensionNode,
UnionTypeExtensionNode,
EnumTypeExtensionNode,
InputObjectTypeExtensionNode,
} from './ast';
export {
isDefinitionNode,
isExecutableDefinitionNode,
isSelectionNode,
isValueNode,
isTypeNode,
isTypeSystemDefinitionNode,
isTypeDefinitionNode,
isTypeSystemExtensionNode,
isTypeExtensionNode,
} from './predicates';
export { DirectiveLocation } from './directiveLocation';
export type { DirectiveLocationEnum } from './directiveLocation';

View File

@@ -0,0 +1,12 @@
export { Source } from "./source.mjs";
export { getLocation } from "./location.mjs";
export { printLocation, printSourceLocation } from "./printLocation.mjs";
export { Kind } from "./kinds.mjs";
export { TokenKind } from "./tokenKind.mjs";
export { Lexer } from "./lexer.mjs";
export { parse, parseValue, parseType } from "./parser.mjs";
export { print } from "./printer.mjs";
export { visit, visitInParallel, getVisitFn, BREAK } from "./visitor.mjs";
export { Location, Token } from "./ast.mjs";
export { isDefinitionNode, isExecutableDefinitionNode, isSelectionNode, isValueNode, isTypeNode, isTypeSystemDefinitionNode, isTypeDefinitionNode, isTypeSystemExtensionNode, isTypeExtensionNode } from "./predicates.mjs";
export { DirectiveLocation } from "./directiveLocation.mjs";

View File

@@ -0,0 +1,74 @@
/**
* The set of allowed kind values for AST nodes.
*/
export const Kind: {
// Name
NAME: 'Name';
// Document
DOCUMENT: 'Document';
OPERATION_DEFINITION: 'OperationDefinition';
VARIABLE_DEFINITION: 'VariableDefinition';
SELECTION_SET: 'SelectionSet';
FIELD: 'Field';
ARGUMENT: 'Argument';
// Fragments
FRAGMENT_SPREAD: 'FragmentSpread';
INLINE_FRAGMENT: 'InlineFragment';
FRAGMENT_DEFINITION: 'FragmentDefinition';
// Values
VARIABLE: 'Variable';
INT: 'IntValue';
FLOAT: 'FloatValue';
STRING: 'StringValue';
BOOLEAN: 'BooleanValue';
NULL: 'NullValue';
ENUM: 'EnumValue';
LIST: 'ListValue';
OBJECT: 'ObjectValue';
OBJECT_FIELD: 'ObjectField';
// Directives
DIRECTIVE: 'Directive';
// Types
NAMED_TYPE: 'NamedType';
LIST_TYPE: 'ListType';
NON_NULL_TYPE: 'NonNullType';
// Type System Definitions
SCHEMA_DEFINITION: 'SchemaDefinition';
OPERATION_TYPE_DEFINITION: 'OperationTypeDefinition';
// Type Definitions
SCALAR_TYPE_DEFINITION: 'ScalarTypeDefinition';
OBJECT_TYPE_DEFINITION: 'ObjectTypeDefinition';
FIELD_DEFINITION: 'FieldDefinition';
INPUT_VALUE_DEFINITION: 'InputValueDefinition';
INTERFACE_TYPE_DEFINITION: 'InterfaceTypeDefinition';
UNION_TYPE_DEFINITION: 'UnionTypeDefinition';
ENUM_TYPE_DEFINITION: 'EnumTypeDefinition';
ENUM_VALUE_DEFINITION: 'EnumValueDefinition';
INPUT_OBJECT_TYPE_DEFINITION: 'InputObjectTypeDefinition';
// Directive Definitions
DIRECTIVE_DEFINITION: 'DirectiveDefinition';
// Type System Extensions
SCHEMA_EXTENSION: 'SchemaExtension';
// Type Extensions
SCALAR_TYPE_EXTENSION: 'ScalarTypeExtension';
OBJECT_TYPE_EXTENSION: 'ObjectTypeExtension';
INTERFACE_TYPE_EXTENSION: 'InterfaceTypeExtension';
UNION_TYPE_EXTENSION: 'UnionTypeExtension';
ENUM_TYPE_EXTENSION: 'EnumTypeExtension';
INPUT_OBJECT_TYPE_EXTENSION: 'InputObjectTypeExtension';
};
/**
* The enum type representing the possible kind values of AST nodes.
*/
export type KindEnum = typeof Kind[keyof typeof Kind];
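// For example (illustrative): code consuming an AST can switch or narrow on these
// values, e.g. `node.kind === Kind.FIELD` identifies a FieldNode and
// `node.kind === Kind.OPERATION_DEFINITION` an OperationDefinitionNode.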

View File

@@ -0,0 +1,71 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.Kind = void 0;
/**
* The set of allowed kind values for AST nodes.
*/
var Kind = Object.freeze({
// Name
NAME: 'Name',
// Document
DOCUMENT: 'Document',
OPERATION_DEFINITION: 'OperationDefinition',
VARIABLE_DEFINITION: 'VariableDefinition',
SELECTION_SET: 'SelectionSet',
FIELD: 'Field',
ARGUMENT: 'Argument',
// Fragments
FRAGMENT_SPREAD: 'FragmentSpread',
INLINE_FRAGMENT: 'InlineFragment',
FRAGMENT_DEFINITION: 'FragmentDefinition',
// Values
VARIABLE: 'Variable',
INT: 'IntValue',
FLOAT: 'FloatValue',
STRING: 'StringValue',
BOOLEAN: 'BooleanValue',
NULL: 'NullValue',
ENUM: 'EnumValue',
LIST: 'ListValue',
OBJECT: 'ObjectValue',
OBJECT_FIELD: 'ObjectField',
// Directives
DIRECTIVE: 'Directive',
// Types
NAMED_TYPE: 'NamedType',
LIST_TYPE: 'ListType',
NON_NULL_TYPE: 'NonNullType',
// Type System Definitions
SCHEMA_DEFINITION: 'SchemaDefinition',
OPERATION_TYPE_DEFINITION: 'OperationTypeDefinition',
// Type Definitions
SCALAR_TYPE_DEFINITION: 'ScalarTypeDefinition',
OBJECT_TYPE_DEFINITION: 'ObjectTypeDefinition',
FIELD_DEFINITION: 'FieldDefinition',
INPUT_VALUE_DEFINITION: 'InputValueDefinition',
INTERFACE_TYPE_DEFINITION: 'InterfaceTypeDefinition',
UNION_TYPE_DEFINITION: 'UnionTypeDefinition',
ENUM_TYPE_DEFINITION: 'EnumTypeDefinition',
ENUM_VALUE_DEFINITION: 'EnumValueDefinition',
INPUT_OBJECT_TYPE_DEFINITION: 'InputObjectTypeDefinition',
// Directive Definitions
DIRECTIVE_DEFINITION: 'DirectiveDefinition',
// Type System Extensions
SCHEMA_EXTENSION: 'SchemaExtension',
// Type Extensions
SCALAR_TYPE_EXTENSION: 'ScalarTypeExtension',
OBJECT_TYPE_EXTENSION: 'ObjectTypeExtension',
INTERFACE_TYPE_EXTENSION: 'InterfaceTypeExtension',
UNION_TYPE_EXTENSION: 'UnionTypeExtension',
ENUM_TYPE_EXTENSION: 'EnumTypeExtension',
INPUT_OBJECT_TYPE_EXTENSION: 'InputObjectTypeExtension'
});
/**
* The enum type representing the possible kind values of AST nodes.
*/
exports.Kind = Kind;

View File

@@ -0,0 +1,75 @@
// @flow strict
/**
* The set of allowed kind values for AST nodes.
*/
export const Kind = Object.freeze({
// Name
NAME: 'Name',
// Document
DOCUMENT: 'Document',
OPERATION_DEFINITION: 'OperationDefinition',
VARIABLE_DEFINITION: 'VariableDefinition',
SELECTION_SET: 'SelectionSet',
FIELD: 'Field',
ARGUMENT: 'Argument',
// Fragments
FRAGMENT_SPREAD: 'FragmentSpread',
INLINE_FRAGMENT: 'InlineFragment',
FRAGMENT_DEFINITION: 'FragmentDefinition',
// Values
VARIABLE: 'Variable',
INT: 'IntValue',
FLOAT: 'FloatValue',
STRING: 'StringValue',
BOOLEAN: 'BooleanValue',
NULL: 'NullValue',
ENUM: 'EnumValue',
LIST: 'ListValue',
OBJECT: 'ObjectValue',
OBJECT_FIELD: 'ObjectField',
// Directives
DIRECTIVE: 'Directive',
// Types
NAMED_TYPE: 'NamedType',
LIST_TYPE: 'ListType',
NON_NULL_TYPE: 'NonNullType',
// Type System Definitions
SCHEMA_DEFINITION: 'SchemaDefinition',
OPERATION_TYPE_DEFINITION: 'OperationTypeDefinition',
// Type Definitions
SCALAR_TYPE_DEFINITION: 'ScalarTypeDefinition',
OBJECT_TYPE_DEFINITION: 'ObjectTypeDefinition',
FIELD_DEFINITION: 'FieldDefinition',
INPUT_VALUE_DEFINITION: 'InputValueDefinition',
INTERFACE_TYPE_DEFINITION: 'InterfaceTypeDefinition',
UNION_TYPE_DEFINITION: 'UnionTypeDefinition',
ENUM_TYPE_DEFINITION: 'EnumTypeDefinition',
ENUM_VALUE_DEFINITION: 'EnumValueDefinition',
INPUT_OBJECT_TYPE_DEFINITION: 'InputObjectTypeDefinition',
// Directive Definitions
DIRECTIVE_DEFINITION: 'DirectiveDefinition',
// Type System Extensions
SCHEMA_EXTENSION: 'SchemaExtension',
// Type Extensions
SCALAR_TYPE_EXTENSION: 'ScalarTypeExtension',
OBJECT_TYPE_EXTENSION: 'ObjectTypeExtension',
INTERFACE_TYPE_EXTENSION: 'InterfaceTypeExtension',
UNION_TYPE_EXTENSION: 'UnionTypeExtension',
ENUM_TYPE_EXTENSION: 'EnumTypeExtension',
INPUT_OBJECT_TYPE_EXTENSION: 'InputObjectTypeExtension',
});
/**
* The enum type representing the possible kind values of AST nodes.
*/
export type KindEnum = $Values<typeof Kind>;

View File

@@ -0,0 +1,62 @@
/**
* The set of allowed kind values for AST nodes.
*/
export var Kind = Object.freeze({
// Name
NAME: 'Name',
// Document
DOCUMENT: 'Document',
OPERATION_DEFINITION: 'OperationDefinition',
VARIABLE_DEFINITION: 'VariableDefinition',
SELECTION_SET: 'SelectionSet',
FIELD: 'Field',
ARGUMENT: 'Argument',
// Fragments
FRAGMENT_SPREAD: 'FragmentSpread',
INLINE_FRAGMENT: 'InlineFragment',
FRAGMENT_DEFINITION: 'FragmentDefinition',
// Values
VARIABLE: 'Variable',
INT: 'IntValue',
FLOAT: 'FloatValue',
STRING: 'StringValue',
BOOLEAN: 'BooleanValue',
NULL: 'NullValue',
ENUM: 'EnumValue',
LIST: 'ListValue',
OBJECT: 'ObjectValue',
OBJECT_FIELD: 'ObjectField',
// Directives
DIRECTIVE: 'Directive',
// Types
NAMED_TYPE: 'NamedType',
LIST_TYPE: 'ListType',
NON_NULL_TYPE: 'NonNullType',
// Type System Definitions
SCHEMA_DEFINITION: 'SchemaDefinition',
OPERATION_TYPE_DEFINITION: 'OperationTypeDefinition',
// Type Definitions
SCALAR_TYPE_DEFINITION: 'ScalarTypeDefinition',
OBJECT_TYPE_DEFINITION: 'ObjectTypeDefinition',
FIELD_DEFINITION: 'FieldDefinition',
INPUT_VALUE_DEFINITION: 'InputValueDefinition',
INTERFACE_TYPE_DEFINITION: 'InterfaceTypeDefinition',
UNION_TYPE_DEFINITION: 'UnionTypeDefinition',
ENUM_TYPE_DEFINITION: 'EnumTypeDefinition',
ENUM_VALUE_DEFINITION: 'EnumValueDefinition',
INPUT_OBJECT_TYPE_DEFINITION: 'InputObjectTypeDefinition',
// Directive Definitions
DIRECTIVE_DEFINITION: 'DirectiveDefinition',
// Type System Extensions
SCHEMA_EXTENSION: 'SchemaExtension',
// Type Extensions
SCALAR_TYPE_EXTENSION: 'ScalarTypeExtension',
OBJECT_TYPE_EXTENSION: 'ObjectTypeExtension',
INTERFACE_TYPE_EXTENSION: 'InterfaceTypeExtension',
UNION_TYPE_EXTENSION: 'UnionTypeExtension',
ENUM_TYPE_EXTENSION: 'EnumTypeExtension',
INPUT_OBJECT_TYPE_EXTENSION: 'InputObjectTypeExtension'
});
/**
* The enum type representing the possible kind values of AST nodes.
*/

View File

@@ -0,0 +1,58 @@
import { Token } from './ast';
import { Source } from './source';
import { TokenKindEnum } from './tokenKind';
/**
* Given a Source object, this returns a Lexer for that source.
* A Lexer is a stateful stream generator in that every time
* it is advanced, it returns the next token in the Source. Assuming the
* source lexes, the final Token emitted by the lexer will be of kind
* EOF, after which the lexer will repeatedly return the same EOF token
* whenever called.
*/
export class Lexer {
source: Source;
/**
* The previously focused non-ignored token.
*/
lastToken: Token;
/**
* The currently focused non-ignored token.
*/
token: Token;
/**
* The (1-indexed) line containing the current token.
*/
line: number;
/**
* The character offset at which the current line begins.
*/
lineStart: number;
constructor(source: Source);
/**
* Advances the token stream to the next non-ignored token.
*/
advance(): Token;
/**
* Looks ahead and returns the next non-ignored token, but does not change
* the state of Lexer.
*/
lookahead(): Token;
}
/**
* @internal
*/
export function isPunctuatorToken(token: Token): boolean;
/**
* @internal
*/
export function isPunctuatorTokenKind(kind: TokenKindEnum): boolean;
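// Illustrative usage (a sketch, not part of the original declarations; Source and
// TokenKind come from './source' and './tokenKind'):
//
//   const lexer = new Lexer(new Source('{ hello }'));
//   let token = lexer.advance();
//   while (token.kind !== TokenKind.EOF) {
//     console.log(token.kind, token.value); // '{', then Name "hello", then '}'
//     token = lexer.advance();
//   }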

View File

@@ -0,0 +1,690 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.isPunctuatorTokenKind = isPunctuatorTokenKind;
exports.Lexer = void 0;
var _syntaxError = require("../error/syntaxError.js");
var _ast = require("./ast.js");
var _tokenKind = require("./tokenKind.js");
var _blockString = require("./blockString.js");
/**
* Given a Source object, creates a Lexer for that source.
* A Lexer is a stateful stream generator in that every time
* it is advanced, it returns the next token in the Source. Assuming the
* source lexes, the final Token emitted by the lexer will be of kind
* EOF, after which the lexer will repeatedly return the same EOF token
* whenever called.
*/
var Lexer = /*#__PURE__*/function () {
/**
* The previously focused non-ignored token.
*/
/**
* The currently focused non-ignored token.
*/
/**
* The (1-indexed) line containing the current token.
*/
/**
* The character offset at which the current line begins.
*/
function Lexer(source) {
var startOfFileToken = new _ast.Token(_tokenKind.TokenKind.SOF, 0, 0, 0, 0, null);
this.source = source;
this.lastToken = startOfFileToken;
this.token = startOfFileToken;
this.line = 1;
this.lineStart = 0;
}
/**
* Advances the token stream to the next non-ignored token.
*/
var _proto = Lexer.prototype;
_proto.advance = function advance() {
this.lastToken = this.token;
var token = this.token = this.lookahead();
return token;
}
/**
* Looks ahead and returns the next non-ignored token, but does not change
* the state of Lexer.
*/
;
_proto.lookahead = function lookahead() {
var token = this.token;
if (token.kind !== _tokenKind.TokenKind.EOF) {
do {
var _token$next;
// Note: next is only mutable during parsing, so we cast to allow this.
token = (_token$next = token.next) !== null && _token$next !== void 0 ? _token$next : token.next = readToken(this, token);
} while (token.kind === _tokenKind.TokenKind.COMMENT);
}
return token;
};
return Lexer;
}();
/**
* @internal
*/
exports.Lexer = Lexer;
function isPunctuatorTokenKind(kind) {
return kind === _tokenKind.TokenKind.BANG || kind === _tokenKind.TokenKind.DOLLAR || kind === _tokenKind.TokenKind.AMP || kind === _tokenKind.TokenKind.PAREN_L || kind === _tokenKind.TokenKind.PAREN_R || kind === _tokenKind.TokenKind.SPREAD || kind === _tokenKind.TokenKind.COLON || kind === _tokenKind.TokenKind.EQUALS || kind === _tokenKind.TokenKind.AT || kind === _tokenKind.TokenKind.BRACKET_L || kind === _tokenKind.TokenKind.BRACKET_R || kind === _tokenKind.TokenKind.BRACE_L || kind === _tokenKind.TokenKind.PIPE || kind === _tokenKind.TokenKind.BRACE_R;
}
function printCharCode(code) {
return (// NaN/undefined represents access beyond the end of the file.
isNaN(code) ? _tokenKind.TokenKind.EOF : // Trust JSON for ASCII.
code < 0x007f ? JSON.stringify(String.fromCharCode(code)) : // Otherwise print the escaped form.
"\"\\u".concat(('00' + code.toString(16).toUpperCase()).slice(-4), "\"")
);
}
/**
* Gets the next token from the source starting at the given position.
*
* This skips over whitespace until it finds the next lexable token, then lexes
* punctuators immediately or calls the appropriate helper function for more
* complicated tokens.
*/
function readToken(lexer, prev) {
var source = lexer.source;
var body = source.body;
var bodyLength = body.length;
var pos = prev.end;
while (pos < bodyLength) {
var code = body.charCodeAt(pos);
var _line = lexer.line;
var _col = 1 + pos - lexer.lineStart; // SourceCharacter
switch (code) {
case 0xfeff: // <BOM>
case 9: // \t
case 32: // <space>
case 44:
// ,
++pos;
continue;
case 10:
// \n
++pos;
++lexer.line;
lexer.lineStart = pos;
continue;
case 13:
// \r
if (body.charCodeAt(pos + 1) === 10) {
pos += 2;
} else {
++pos;
}
++lexer.line;
lexer.lineStart = pos;
continue;
case 33:
// !
return new _ast.Token(_tokenKind.TokenKind.BANG, pos, pos + 1, _line, _col, prev);
case 35:
// #
return readComment(source, pos, _line, _col, prev);
case 36:
// $
return new _ast.Token(_tokenKind.TokenKind.DOLLAR, pos, pos + 1, _line, _col, prev);
case 38:
// &
return new _ast.Token(_tokenKind.TokenKind.AMP, pos, pos + 1, _line, _col, prev);
case 40:
// (
return new _ast.Token(_tokenKind.TokenKind.PAREN_L, pos, pos + 1, _line, _col, prev);
case 41:
// )
return new _ast.Token(_tokenKind.TokenKind.PAREN_R, pos, pos + 1, _line, _col, prev);
case 46:
// .
if (body.charCodeAt(pos + 1) === 46 && body.charCodeAt(pos + 2) === 46) {
return new _ast.Token(_tokenKind.TokenKind.SPREAD, pos, pos + 3, _line, _col, prev);
}
break;
case 58:
// :
return new _ast.Token(_tokenKind.TokenKind.COLON, pos, pos + 1, _line, _col, prev);
case 61:
// =
return new _ast.Token(_tokenKind.TokenKind.EQUALS, pos, pos + 1, _line, _col, prev);
case 64:
// @
return new _ast.Token(_tokenKind.TokenKind.AT, pos, pos + 1, _line, _col, prev);
case 91:
// [
return new _ast.Token(_tokenKind.TokenKind.BRACKET_L, pos, pos + 1, _line, _col, prev);
case 93:
// ]
return new _ast.Token(_tokenKind.TokenKind.BRACKET_R, pos, pos + 1, _line, _col, prev);
case 123:
// {
return new _ast.Token(_tokenKind.TokenKind.BRACE_L, pos, pos + 1, _line, _col, prev);
case 124:
// |
return new _ast.Token(_tokenKind.TokenKind.PIPE, pos, pos + 1, _line, _col, prev);
case 125:
// }
return new _ast.Token(_tokenKind.TokenKind.BRACE_R, pos, pos + 1, _line, _col, prev);
case 34:
// "
if (body.charCodeAt(pos + 1) === 34 && body.charCodeAt(pos + 2) === 34) {
return readBlockString(source, pos, _line, _col, prev, lexer);
}
return readString(source, pos, _line, _col, prev);
case 45: // -
case 48: // 0
case 49: // 1
case 50: // 2
case 51: // 3
case 52: // 4
case 53: // 5
case 54: // 6
case 55: // 7
case 56: // 8
case 57:
// 9
return readNumber(source, pos, code, _line, _col, prev);
case 65: // A
case 66: // B
case 67: // C
case 68: // D
case 69: // E
case 70: // F
case 71: // G
case 72: // H
case 73: // I
case 74: // J
case 75: // K
case 76: // L
case 77: // M
case 78: // N
case 79: // O
case 80: // P
case 81: // Q
case 82: // R
case 83: // S
case 84: // T
case 85: // U
case 86: // V
case 87: // W
case 88: // X
case 89: // Y
case 90: // Z
case 95: // _
case 97: // a
case 98: // b
case 99: // c
case 100: // d
case 101: // e
case 102: // f
case 103: // g
case 104: // h
case 105: // i
case 106: // j
case 107: // k
case 108: // l
case 109: // m
case 110: // n
case 111: // o
case 112: // p
case 113: // q
case 114: // r
case 115: // s
case 116: // t
case 117: // u
case 118: // v
case 119: // w
case 120: // x
case 121: // y
case 122:
// z
return readName(source, pos, _line, _col, prev);
}
throw (0, _syntaxError.syntaxError)(source, pos, unexpectedCharacterMessage(code));
}
var line = lexer.line;
var col = 1 + pos - lexer.lineStart;
return new _ast.Token(_tokenKind.TokenKind.EOF, bodyLength, bodyLength, line, col, prev);
}
/**
* Report a message that an unexpected character was encountered.
*/
function unexpectedCharacterMessage(code) {
if (code < 0x0020 && code !== 0x0009 && code !== 0x000a && code !== 0x000d) {
return "Cannot contain the invalid character ".concat(printCharCode(code), ".");
}
if (code === 39) {
// '
return 'Unexpected single quote character (\'), did you mean to use a double quote (")?';
}
return "Cannot parse the unexpected character ".concat(printCharCode(code), ".");
}
/**
* Reads a comment token from the source file.
*
* #[\u0009\u0020-\uFFFF]*
*/
function readComment(source, start, line, col, prev) {
var body = source.body;
var code;
var position = start;
do {
code = body.charCodeAt(++position);
} while (!isNaN(code) && ( // SourceCharacter but not LineTerminator
code > 0x001f || code === 0x0009));
return new _ast.Token(_tokenKind.TokenKind.COMMENT, start, position, line, col, prev, body.slice(start + 1, position));
}
/**
* Reads a number token from the source file, either a float
* or an int depending on whether a decimal point appears.
*
* Int: -?(0|[1-9][0-9]*)
* Float: -?(0|[1-9][0-9]*)(\.[0-9]+)?((E|e)(+|-)?[0-9]+)?
*/
function readNumber(source, start, firstCode, line, col, prev) {
var body = source.body;
var code = firstCode;
var position = start;
var isFloat = false;
if (code === 45) {
// -
code = body.charCodeAt(++position);
}
if (code === 48) {
// 0
code = body.charCodeAt(++position);
if (code >= 48 && code <= 57) {
throw (0, _syntaxError.syntaxError)(source, position, "Invalid number, unexpected digit after 0: ".concat(printCharCode(code), "."));
}
} else {
position = readDigits(source, position, code);
code = body.charCodeAt(position);
}
if (code === 46) {
// .
isFloat = true;
code = body.charCodeAt(++position);
position = readDigits(source, position, code);
code = body.charCodeAt(position);
}
if (code === 69 || code === 101) {
// E e
isFloat = true;
code = body.charCodeAt(++position);
if (code === 43 || code === 45) {
// + -
code = body.charCodeAt(++position);
}
position = readDigits(source, position, code);
code = body.charCodeAt(position);
} // Numbers cannot be followed by . or NameStart
if (code === 46 || isNameStart(code)) {
throw (0, _syntaxError.syntaxError)(source, position, "Invalid number, expected digit but got: ".concat(printCharCode(code), "."));
}
return new _ast.Token(isFloat ? _tokenKind.TokenKind.FLOAT : _tokenKind.TokenKind.INT, start, position, line, col, prev, body.slice(start, position));
}
/**
* Returns the new position in the source after reading digits.
*/
function readDigits(source, start, firstCode) {
var body = source.body;
var position = start;
var code = firstCode;
if (code >= 48 && code <= 57) {
// 0 - 9
do {
code = body.charCodeAt(++position);
} while (code >= 48 && code <= 57); // 0 - 9
return position;
}
throw (0, _syntaxError.syntaxError)(source, position, "Invalid number, expected digit but got: ".concat(printCharCode(code), "."));
}
/**
* Reads a string token from the source file.
*
* "([^"\\\u000A\u000D]|(\\(u[0-9a-fA-F]{4}|["\\/bfnrt])))*"
*/
function readString(source, start, line, col, prev) {
var body = source.body;
var position = start + 1;
var chunkStart = position;
var code = 0;
var value = '';
while (position < body.length && !isNaN(code = body.charCodeAt(position)) && // not LineTerminator
code !== 0x000a && code !== 0x000d) {
// Closing Quote (")
if (code === 34) {
value += body.slice(chunkStart, position);
return new _ast.Token(_tokenKind.TokenKind.STRING, start, position + 1, line, col, prev, value);
} // SourceCharacter
if (code < 0x0020 && code !== 0x0009) {
throw (0, _syntaxError.syntaxError)(source, position, "Invalid character within String: ".concat(printCharCode(code), "."));
}
++position;
if (code === 92) {
// \
value += body.slice(chunkStart, position - 1);
code = body.charCodeAt(position);
switch (code) {
case 34:
value += '"';
break;
case 47:
value += '/';
break;
case 92:
value += '\\';
break;
case 98:
value += '\b';
break;
case 102:
value += '\f';
break;
case 110:
value += '\n';
break;
case 114:
value += '\r';
break;
case 116:
value += '\t';
break;
case 117:
{
// uXXXX
var charCode = uniCharCode(body.charCodeAt(position + 1), body.charCodeAt(position + 2), body.charCodeAt(position + 3), body.charCodeAt(position + 4));
if (charCode < 0) {
var invalidSequence = body.slice(position + 1, position + 5);
throw (0, _syntaxError.syntaxError)(source, position, "Invalid character escape sequence: \\u".concat(invalidSequence, "."));
}
value += String.fromCharCode(charCode);
position += 4;
break;
}
default:
throw (0, _syntaxError.syntaxError)(source, position, "Invalid character escape sequence: \\".concat(String.fromCharCode(code), "."));
}
++position;
chunkStart = position;
}
}
throw (0, _syntaxError.syntaxError)(source, position, 'Unterminated string.');
}
/**
* Reads a block string token from the source file.
*
* """("?"?(\\"""|\\(?!=""")|[^"\\]))*"""
*/
function readBlockString(source, start, line, col, prev, lexer) {
var body = source.body;
var position = start + 3;
var chunkStart = position;
var code = 0;
var rawValue = '';
while (position < body.length && !isNaN(code = body.charCodeAt(position))) {
// Closing Triple-Quote (""")
if (code === 34 && body.charCodeAt(position + 1) === 34 && body.charCodeAt(position + 2) === 34) {
rawValue += body.slice(chunkStart, position);
return new _ast.Token(_tokenKind.TokenKind.BLOCK_STRING, start, position + 3, line, col, prev, (0, _blockString.dedentBlockStringValue)(rawValue));
} // SourceCharacter
if (code < 0x0020 && code !== 0x0009 && code !== 0x000a && code !== 0x000d) {
throw (0, _syntaxError.syntaxError)(source, position, "Invalid character within String: ".concat(printCharCode(code), "."));
}
if (code === 10) {
// new line
++position;
++lexer.line;
lexer.lineStart = position;
} else if (code === 13) {
// carriage return
if (body.charCodeAt(position + 1) === 10) {
position += 2;
} else {
++position;
}
++lexer.line;
lexer.lineStart = position;
} else if ( // Escape Triple-Quote (\""")
code === 92 && body.charCodeAt(position + 1) === 34 && body.charCodeAt(position + 2) === 34 && body.charCodeAt(position + 3) === 34) {
rawValue += body.slice(chunkStart, position) + '"""';
position += 4;
chunkStart = position;
} else {
++position;
}
}
throw (0, _syntaxError.syntaxError)(source, position, 'Unterminated string.');
}
/**
* Converts four hexadecimal chars to the integer that the
* string represents. For example, uniCharCode('0','0','0','f')
* will return 15, and uniCharCode('0','0','f','f') returns 255.
*
* Returns a negative number on error, if a char was invalid.
*
* This is implemented by noting that char2hex() returns -1 on error,
* which means the result of ORing the char2hex() will also be negative.
*/
function uniCharCode(a, b, c, d) {
return char2hex(a) << 12 | char2hex(b) << 8 | char2hex(c) << 4 | char2hex(d);
}
/**
* Converts a hex character to its integer value.
* '0' becomes 0, '9' becomes 9
* 'A' becomes 10, 'F' becomes 15
* 'a' becomes 10, 'f' becomes 15
*
* Returns -1 on error.
*/
function char2hex(a) {
return a >= 48 && a <= 57 ? a - 48 // 0-9
: a >= 65 && a <= 70 ? a - 55 // A-F
: a >= 97 && a <= 102 ? a - 87 // a-f
: -1;
}
/**
* Reads an alphanumeric + underscore name from the source.
*
* [_A-Za-z][_0-9A-Za-z]*
*/
function readName(source, start, line, col, prev) {
var body = source.body;
var bodyLength = body.length;
var position = start + 1;
var code = 0;
while (position !== bodyLength && !isNaN(code = body.charCodeAt(position)) && (code === 95 || // _
code >= 48 && code <= 57 || // 0-9
code >= 65 && code <= 90 || // A-Z
code >= 97 && code <= 122) // a-z
) {
++position;
}
return new _ast.Token(_tokenKind.TokenKind.NAME, start, position, line, col, prev, body.slice(start, position));
} // _ A-Z a-z
function isNameStart(code) {
return code === 95 || code >= 65 && code <= 90 || code >= 97 && code <= 122;
}

View File

@@ -0,0 +1,701 @@
// @flow strict
import { syntaxError } from '../error/syntaxError';
import type { Source } from './source';
import type { TokenKindEnum } from './tokenKind';
import { Token } from './ast';
import { TokenKind } from './tokenKind';
import { dedentBlockStringValue } from './blockString';
/**
* Given a Source object, creates a Lexer for that source.
* A Lexer is a stateful stream generator in that every time
* it is advanced, it returns the next token in the Source. Assuming the
* source lexes, the final Token emitted by the lexer will be of kind
* EOF, after which the lexer will repeatedly return the same EOF token
* whenever called.
*/
export class Lexer {
source: Source;
/**
* The previously focused non-ignored token.
*/
lastToken: Token;
/**
* The currently focused non-ignored token.
*/
token: Token;
/**
* The (1-indexed) line containing the current token.
*/
line: number;
/**
* The character offset at which the current line begins.
*/
lineStart: number;
constructor(source: Source) {
const startOfFileToken = new Token(TokenKind.SOF, 0, 0, 0, 0, null);
this.source = source;
this.lastToken = startOfFileToken;
this.token = startOfFileToken;
this.line = 1;
this.lineStart = 0;
}
/**
* Advances the token stream to the next non-ignored token.
*/
advance(): Token {
this.lastToken = this.token;
const token = (this.token = this.lookahead());
return token;
}
/**
* Looks ahead and returns the next non-ignored token, but does not change
* the state of Lexer.
*/
lookahead(): Token {
let token = this.token;
if (token.kind !== TokenKind.EOF) {
do {
// Note: next is only mutable during parsing, so we cast to allow this.
token = token.next ?? ((token: any).next = readToken(this, token));
} while (token.kind === TokenKind.COMMENT);
}
return token;
}
}
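// For example (illustrative): given the source '{ hello }', lexer.lookahead()
// returns the BRACE_L token while leaving lexer.token at <SOF>, whereas
// lexer.advance() returns the same BRACE_L token and makes it the currently
// focused token.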
/**
* @internal
*/
export function isPunctuatorTokenKind(kind: TokenKindEnum): boolean %checks {
return (
kind === TokenKind.BANG ||
kind === TokenKind.DOLLAR ||
kind === TokenKind.AMP ||
kind === TokenKind.PAREN_L ||
kind === TokenKind.PAREN_R ||
kind === TokenKind.SPREAD ||
kind === TokenKind.COLON ||
kind === TokenKind.EQUALS ||
kind === TokenKind.AT ||
kind === TokenKind.BRACKET_L ||
kind === TokenKind.BRACKET_R ||
kind === TokenKind.BRACE_L ||
kind === TokenKind.PIPE ||
kind === TokenKind.BRACE_R
);
}
function printCharCode(code: number): string {
return (
// NaN/undefined represents access beyond the end of the file.
isNaN(code)
? TokenKind.EOF
: // Trust JSON for ASCII.
code < 0x007f
? JSON.stringify(String.fromCharCode(code))
: // Otherwise print the escaped form.
`"\\u${('00' + code.toString(16).toUpperCase()).slice(-4)}"`
);
}
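// For example: printCharCode(0x61) returns the string '"a"' (quotes included),
// while a code point outside the ASCII range such as 0x2665 is rendered in the
// escaped form '"\u2665"'.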
/**
* Gets the next token from the source starting at the given position.
*
* This skips over whitespace until it finds the next lexable token, then lexes
* punctuators immediately or calls the appropriate helper function for more
* complicated tokens.
*/
function readToken(lexer: Lexer, prev: Token): Token {
const source = lexer.source;
const body = source.body;
const bodyLength = body.length;
let pos = prev.end;
while (pos < bodyLength) {
const code = body.charCodeAt(pos);
const line = lexer.line;
const col = 1 + pos - lexer.lineStart;
// SourceCharacter
switch (code) {
case 0xfeff: // <BOM>
case 9: // \t
case 32: // <space>
case 44: // ,
++pos;
continue;
case 10: // \n
++pos;
++lexer.line;
lexer.lineStart = pos;
continue;
case 13: // \r
if (body.charCodeAt(pos + 1) === 10) {
pos += 2;
} else {
++pos;
}
++lexer.line;
lexer.lineStart = pos;
continue;
case 33: // !
return new Token(TokenKind.BANG, pos, pos + 1, line, col, prev);
case 35: // #
return readComment(source, pos, line, col, prev);
case 36: // $
return new Token(TokenKind.DOLLAR, pos, pos + 1, line, col, prev);
case 38: // &
return new Token(TokenKind.AMP, pos, pos + 1, line, col, prev);
case 40: // (
return new Token(TokenKind.PAREN_L, pos, pos + 1, line, col, prev);
case 41: // )
return new Token(TokenKind.PAREN_R, pos, pos + 1, line, col, prev);
case 46: // .
if (
body.charCodeAt(pos + 1) === 46 &&
body.charCodeAt(pos + 2) === 46
) {
return new Token(TokenKind.SPREAD, pos, pos + 3, line, col, prev);
}
break;
case 58: // :
return new Token(TokenKind.COLON, pos, pos + 1, line, col, prev);
case 61: // =
return new Token(TokenKind.EQUALS, pos, pos + 1, line, col, prev);
case 64: // @
return new Token(TokenKind.AT, pos, pos + 1, line, col, prev);
case 91: // [
return new Token(TokenKind.BRACKET_L, pos, pos + 1, line, col, prev);
case 93: // ]
return new Token(TokenKind.BRACKET_R, pos, pos + 1, line, col, prev);
case 123: // {
return new Token(TokenKind.BRACE_L, pos, pos + 1, line, col, prev);
case 124: // |
return new Token(TokenKind.PIPE, pos, pos + 1, line, col, prev);
case 125: // }
return new Token(TokenKind.BRACE_R, pos, pos + 1, line, col, prev);
case 34: // "
if (
body.charCodeAt(pos + 1) === 34 &&
body.charCodeAt(pos + 2) === 34
) {
return readBlockString(source, pos, line, col, prev, lexer);
}
return readString(source, pos, line, col, prev);
case 45: // -
case 48: // 0
case 49: // 1
case 50: // 2
case 51: // 3
case 52: // 4
case 53: // 5
case 54: // 6
case 55: // 7
case 56: // 8
case 57: // 9
return readNumber(source, pos, code, line, col, prev);
case 65: // A
case 66: // B
case 67: // C
case 68: // D
case 69: // E
case 70: // F
case 71: // G
case 72: // H
case 73: // I
case 74: // J
case 75: // K
case 76: // L
case 77: // M
case 78: // N
case 79: // O
case 80: // P
case 81: // Q
case 82: // R
case 83: // S
case 84: // T
case 85: // U
case 86: // V
case 87: // W
case 88: // X
case 89: // Y
case 90: // Z
case 95: // _
case 97: // a
case 98: // b
case 99: // c
case 100: // d
case 101: // e
case 102: // f
case 103: // g
case 104: // h
case 105: // i
case 106: // j
case 107: // k
case 108: // l
case 109: // m
case 110: // n
case 111: // o
case 112: // p
case 113: // q
case 114: // r
case 115: // s
case 116: // t
case 117: // u
case 118: // v
case 119: // w
case 120: // x
case 121: // y
case 122: // z
return readName(source, pos, line, col, prev);
}
throw syntaxError(source, pos, unexpectedCharacterMessage(code));
}
const line = lexer.line;
const col = 1 + pos - lexer.lineStart;
return new Token(TokenKind.EOF, bodyLength, bodyLength, line, col, prev);
}
/**
* Report a message that an unexpected character was encountered.
*/
function unexpectedCharacterMessage(code: number): string {
if (code < 0x0020 && code !== 0x0009 && code !== 0x000a && code !== 0x000d) {
return `Cannot contain the invalid character ${printCharCode(code)}.`;
}
if (code === 39) {
// '
return 'Unexpected single quote character (\'), did you mean to use a double quote (")?';
}
return `Cannot parse the unexpected character ${printCharCode(code)}.`;
}
/**
* Reads a comment token from the source file.
*
* #[\u0009\u0020-\uFFFF]*
*/
function readComment(
source: Source,
start: number,
line: number,
col: number,
prev: Token | null,
): Token {
const body = source.body;
let code;
let position = start;
do {
code = body.charCodeAt(++position);
} while (
!isNaN(code) &&
// SourceCharacter but not LineTerminator
(code > 0x001f || code === 0x0009)
);
return new Token(
TokenKind.COMMENT,
start,
position,
line,
col,
prev,
body.slice(start + 1, position),
);
}
/**
* Reads a number token from the source file, either a float
* or an int depending on whether a decimal point appears.
*
* Int: -?(0|[1-9][0-9]*)
* Float: -?(0|[1-9][0-9]*)(\.[0-9]+)?((E|e)(+|-)?[0-9]+)?
*/
function readNumber(
source: Source,
start: number,
firstCode: number,
line: number,
col: number,
prev: Token | null,
): Token {
const body = source.body;
let code = firstCode;
let position = start;
let isFloat = false;
if (code === 45) {
// -
code = body.charCodeAt(++position);
}
if (code === 48) {
// 0
code = body.charCodeAt(++position);
if (code >= 48 && code <= 57) {
throw syntaxError(
source,
position,
`Invalid number, unexpected digit after 0: ${printCharCode(code)}.`,
);
}
} else {
position = readDigits(source, position, code);
code = body.charCodeAt(position);
}
if (code === 46) {
// .
isFloat = true;
code = body.charCodeAt(++position);
position = readDigits(source, position, code);
code = body.charCodeAt(position);
}
if (code === 69 || code === 101) {
// E e
isFloat = true;
code = body.charCodeAt(++position);
if (code === 43 || code === 45) {
// + -
code = body.charCodeAt(++position);
}
position = readDigits(source, position, code);
code = body.charCodeAt(position);
}
// Numbers cannot be followed by . or NameStart
if (code === 46 || isNameStart(code)) {
throw syntaxError(
source,
position,
`Invalid number, expected digit but got: ${printCharCode(code)}.`,
);
}
return new Token(
isFloat ? TokenKind.FLOAT : TokenKind.INT,
start,
position,
line,
col,
prev,
body.slice(start, position),
);
}
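// For example (illustrative): '-42' and '0' lex as INT tokens, '3.14e-2' lexes as
// a FLOAT token, and '01' throws "Invalid number, unexpected digit after 0".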
/**
* Returns the new position in the source after reading digits.
*/
function readDigits(source: Source, start: number, firstCode: number): number {
const body = source.body;
let position = start;
let code = firstCode;
if (code >= 48 && code <= 57) {
// 0 - 9
do {
code = body.charCodeAt(++position);
} while (code >= 48 && code <= 57); // 0 - 9
return position;
}
throw syntaxError(
source,
position,
`Invalid number, expected digit but got: ${printCharCode(code)}.`,
);
}
/**
* Reads a string token from the source file.
*
* "([^"\\\u000A\u000D]|(\\(u[0-9a-fA-F]{4}|["\\/bfnrt])))*"
*/
function readString(
source: Source,
start: number,
line: number,
col: number,
prev: Token | null,
): Token {
const body = source.body;
let position = start + 1;
let chunkStart = position;
let code = 0;
let value = '';
while (
position < body.length &&
!isNaN((code = body.charCodeAt(position))) &&
// not LineTerminator
code !== 0x000a &&
code !== 0x000d
) {
// Closing Quote (")
if (code === 34) {
value += body.slice(chunkStart, position);
return new Token(
TokenKind.STRING,
start,
position + 1,
line,
col,
prev,
value,
);
}
// SourceCharacter
if (code < 0x0020 && code !== 0x0009) {
throw syntaxError(
source,
position,
`Invalid character within String: ${printCharCode(code)}.`,
);
}
++position;
if (code === 92) {
// \
value += body.slice(chunkStart, position - 1);
code = body.charCodeAt(position);
switch (code) {
case 34:
value += '"';
break;
case 47:
value += '/';
break;
case 92:
value += '\\';
break;
case 98:
value += '\b';
break;
case 102:
value += '\f';
break;
case 110:
value += '\n';
break;
case 114:
value += '\r';
break;
case 116:
value += '\t';
break;
case 117: {
// uXXXX
const charCode = uniCharCode(
body.charCodeAt(position + 1),
body.charCodeAt(position + 2),
body.charCodeAt(position + 3),
body.charCodeAt(position + 4),
);
if (charCode < 0) {
const invalidSequence = body.slice(position + 1, position + 5);
throw syntaxError(
source,
position,
`Invalid character escape sequence: \\u${invalidSequence}.`,
);
}
value += String.fromCharCode(charCode);
position += 4;
break;
}
default:
throw syntaxError(
source,
position,
`Invalid character escape sequence: \\${String.fromCharCode(
code,
)}.`,
);
}
++position;
chunkStart = position;
}
}
throw syntaxError(source, position, 'Unterminated string.');
}
/**
* Reads a block string token from the source file.
*
* """("?"?(\\"""|\\(?!=""")|[^"\\]))*"""
*/
function readBlockString(
source: Source,
start: number,
line: number,
col: number,
prev: Token | null,
lexer: Lexer,
): Token {
const body = source.body;
let position = start + 3;
let chunkStart = position;
let code = 0;
let rawValue = '';
while (position < body.length && !isNaN((code = body.charCodeAt(position)))) {
// Closing Triple-Quote (""")
if (
code === 34 &&
body.charCodeAt(position + 1) === 34 &&
body.charCodeAt(position + 2) === 34
) {
rawValue += body.slice(chunkStart, position);
return new Token(
TokenKind.BLOCK_STRING,
start,
position + 3,
line,
col,
prev,
dedentBlockStringValue(rawValue),
);
}
// SourceCharacter
if (
code < 0x0020 &&
code !== 0x0009 &&
code !== 0x000a &&
code !== 0x000d
) {
throw syntaxError(
source,
position,
`Invalid character within String: ${printCharCode(code)}.`,
);
}
if (code === 10) {
// new line
++position;
++lexer.line;
lexer.lineStart = position;
} else if (code === 13) {
// carriage return
if (body.charCodeAt(position + 1) === 10) {
position += 2;
} else {
++position;
}
++lexer.line;
lexer.lineStart = position;
} else if (
// Escape Triple-Quote (\""")
code === 92 &&
body.charCodeAt(position + 1) === 34 &&
body.charCodeAt(position + 2) === 34 &&
body.charCodeAt(position + 3) === 34
) {
rawValue += body.slice(chunkStart, position) + '"""';
position += 4;
chunkStart = position;
} else {
++position;
}
}
throw syntaxError(source, position, 'Unterminated string.');
}
/**
* Converts four hexadecimal chars to the integer that the
* string represents. For example, uniCharCode('0','0','0','f')
* will return 15, and uniCharCode('0','0','f','f') returns 255.
*
* Returns a negative number on error, if a char was invalid.
*
* This is implemented by noting that char2hex() returns -1 on error,
 * which means the result of OR-ing the char2hex() results will also be negative.
*/
function uniCharCode(a: number, b: number, c: number, d: number): number {
return (
(char2hex(a) << 12) | (char2hex(b) << 8) | (char2hex(c) << 4) | char2hex(d)
);
}
/**
* Converts a hex character to its integer value.
* '0' becomes 0, '9' becomes 9
* 'A' becomes 10, 'F' becomes 15
* 'a' becomes 10, 'f' becomes 15
*
* Returns -1 on error.
*/
function char2hex(a: number): number {
return a >= 48 && a <= 57
? a - 48 // 0-9
: a >= 65 && a <= 70
? a - 55 // A-F
: a >= 97 && a <= 102
? a - 87 // a-f
: -1;
}
/**
* Reads an alphanumeric + underscore name from the source.
*
* [_A-Za-z][_0-9A-Za-z]*
*/
function readName(
source: Source,
start: number,
line: number,
col: number,
prev: Token | null,
): Token {
const body = source.body;
const bodyLength = body.length;
let position = start + 1;
let code = 0;
while (
position !== bodyLength &&
!isNaN((code = body.charCodeAt(position))) &&
(code === 95 || // _
(code >= 48 && code <= 57) || // 0-9
(code >= 65 && code <= 90) || // A-Z
(code >= 97 && code <= 122)) // a-z
) {
++position;
}
return new Token(
TokenKind.NAME,
start,
position,
line,
col,
prev,
body.slice(start, position),
);
}
// _ A-Z a-z
function isNameStart(code: number): boolean {
return (
code === 95 || (code >= 65 && code <= 90) || (code >= 97 && code <= 122)
);
}
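
A minimal usage sketch (not part of the committed files): driving the lexer over a short document to see the token kinds produced by readNumber and readString above. The relative module paths and the Lexer/Source/TokenKind exports are assumptions based on the other files in this commit.

// Illustrative sketch only.
import { Lexer } from './lexer';
import { Source } from './source';
import { TokenKind } from './tokenKind';

const lexer = new Lexer(new Source('{ f(n: -1.5e3, s: "caf\\u00e9") }'));
let token = lexer.advance();
while (token.kind !== TokenKind.EOF) {
  // -1.5e3 lexes as a single FLOAT token; the \u00e9 escape in the string is
  // decoded by uniCharCode/char2hex before the STRING token is built.
  console.log(token.kind, token.value);
  token = lexer.advance();
}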

View File

@@ -0,0 +1,676 @@
import { syntaxError } from "../error/syntaxError.mjs";
import { Token } from "./ast.mjs";
import { TokenKind } from "./tokenKind.mjs";
import { dedentBlockStringValue } from "./blockString.mjs";
/**
* Given a Source object, creates a Lexer for that source.
* A Lexer is a stateful stream generator in that every time
* it is advanced, it returns the next token in the Source. Assuming the
* source lexes, the final Token emitted by the lexer will be of kind
* EOF, after which the lexer will repeatedly return the same EOF token
* whenever called.
*/
export var Lexer = /*#__PURE__*/function () {
/**
* The previously focused non-ignored token.
*/
/**
* The currently focused non-ignored token.
*/
/**
* The (1-indexed) line containing the current token.
*/
/**
* The character offset at which the current line begins.
*/
function Lexer(source) {
var startOfFileToken = new Token(TokenKind.SOF, 0, 0, 0, 0, null);
this.source = source;
this.lastToken = startOfFileToken;
this.token = startOfFileToken;
this.line = 1;
this.lineStart = 0;
}
/**
* Advances the token stream to the next non-ignored token.
*/
var _proto = Lexer.prototype;
_proto.advance = function advance() {
this.lastToken = this.token;
var token = this.token = this.lookahead();
return token;
}
/**
* Looks ahead and returns the next non-ignored token, but does not change
* the state of Lexer.
*/
;
_proto.lookahead = function lookahead() {
var token = this.token;
if (token.kind !== TokenKind.EOF) {
do {
var _token$next;
// Note: next is only mutable during parsing, so we cast to allow this.
token = (_token$next = token.next) !== null && _token$next !== void 0 ? _token$next : token.next = readToken(this, token);
} while (token.kind === TokenKind.COMMENT);
}
return token;
};
return Lexer;
}();
/**
* @internal
*/
export function isPunctuatorTokenKind(kind) {
return kind === TokenKind.BANG || kind === TokenKind.DOLLAR || kind === TokenKind.AMP || kind === TokenKind.PAREN_L || kind === TokenKind.PAREN_R || kind === TokenKind.SPREAD || kind === TokenKind.COLON || kind === TokenKind.EQUALS || kind === TokenKind.AT || kind === TokenKind.BRACKET_L || kind === TokenKind.BRACKET_R || kind === TokenKind.BRACE_L || kind === TokenKind.PIPE || kind === TokenKind.BRACE_R;
}
function printCharCode(code) {
return (// NaN/undefined represents access beyond the end of the file.
isNaN(code) ? TokenKind.EOF : // Trust JSON for ASCII.
code < 0x007f ? JSON.stringify(String.fromCharCode(code)) : // Otherwise print the escaped form.
"\"\\u".concat(('00' + code.toString(16).toUpperCase()).slice(-4), "\"")
);
}
/**
* Gets the next token from the source starting at the given position.
*
* This skips over whitespace until it finds the next lexable token, then lexes
* punctuators immediately or calls the appropriate helper function for more
* complicated tokens.
*/
function readToken(lexer, prev) {
var source = lexer.source;
var body = source.body;
var bodyLength = body.length;
var pos = prev.end;
while (pos < bodyLength) {
var code = body.charCodeAt(pos);
var _line = lexer.line;
var _col = 1 + pos - lexer.lineStart; // SourceCharacter
switch (code) {
case 0xfeff: // <BOM>
case 9: // \t
case 32: // <space>
case 44:
// ,
++pos;
continue;
case 10:
// \n
++pos;
++lexer.line;
lexer.lineStart = pos;
continue;
case 13:
// \r
if (body.charCodeAt(pos + 1) === 10) {
pos += 2;
} else {
++pos;
}
++lexer.line;
lexer.lineStart = pos;
continue;
case 33:
// !
return new Token(TokenKind.BANG, pos, pos + 1, _line, _col, prev);
case 35:
// #
return readComment(source, pos, _line, _col, prev);
case 36:
// $
return new Token(TokenKind.DOLLAR, pos, pos + 1, _line, _col, prev);
case 38:
// &
return new Token(TokenKind.AMP, pos, pos + 1, _line, _col, prev);
case 40:
// (
return new Token(TokenKind.PAREN_L, pos, pos + 1, _line, _col, prev);
case 41:
// )
return new Token(TokenKind.PAREN_R, pos, pos + 1, _line, _col, prev);
case 46:
// .
if (body.charCodeAt(pos + 1) === 46 && body.charCodeAt(pos + 2) === 46) {
return new Token(TokenKind.SPREAD, pos, pos + 3, _line, _col, prev);
}
break;
case 58:
// :
return new Token(TokenKind.COLON, pos, pos + 1, _line, _col, prev);
case 61:
// =
return new Token(TokenKind.EQUALS, pos, pos + 1, _line, _col, prev);
case 64:
// @
return new Token(TokenKind.AT, pos, pos + 1, _line, _col, prev);
case 91:
// [
return new Token(TokenKind.BRACKET_L, pos, pos + 1, _line, _col, prev);
case 93:
// ]
return new Token(TokenKind.BRACKET_R, pos, pos + 1, _line, _col, prev);
case 123:
// {
return new Token(TokenKind.BRACE_L, pos, pos + 1, _line, _col, prev);
case 124:
// |
return new Token(TokenKind.PIPE, pos, pos + 1, _line, _col, prev);
case 125:
// }
return new Token(TokenKind.BRACE_R, pos, pos + 1, _line, _col, prev);
case 34:
// "
if (body.charCodeAt(pos + 1) === 34 && body.charCodeAt(pos + 2) === 34) {
return readBlockString(source, pos, _line, _col, prev, lexer);
}
return readString(source, pos, _line, _col, prev);
case 45: // -
case 48: // 0
case 49: // 1
case 50: // 2
case 51: // 3
case 52: // 4
case 53: // 5
case 54: // 6
case 55: // 7
case 56: // 8
case 57:
// 9
return readNumber(source, pos, code, _line, _col, prev);
case 65: // A
case 66: // B
case 67: // C
case 68: // D
case 69: // E
case 70: // F
case 71: // G
case 72: // H
case 73: // I
case 74: // J
case 75: // K
case 76: // L
case 77: // M
case 78: // N
case 79: // O
case 80: // P
case 81: // Q
case 82: // R
case 83: // S
case 84: // T
case 85: // U
case 86: // V
case 87: // W
case 88: // X
case 89: // Y
case 90: // Z
case 95: // _
case 97: // a
case 98: // b
case 99: // c
case 100: // d
case 101: // e
case 102: // f
case 103: // g
case 104: // h
case 105: // i
case 106: // j
case 107: // k
case 108: // l
case 109: // m
case 110: // n
case 111: // o
case 112: // p
case 113: // q
case 114: // r
case 115: // s
case 116: // t
case 117: // u
case 118: // v
case 119: // w
case 120: // x
case 121: // y
case 122:
// z
return readName(source, pos, _line, _col, prev);
}
throw syntaxError(source, pos, unexpectedCharacterMessage(code));
}
var line = lexer.line;
var col = 1 + pos - lexer.lineStart;
return new Token(TokenKind.EOF, bodyLength, bodyLength, line, col, prev);
}
/**
* Report a message that an unexpected character was encountered.
*/
function unexpectedCharacterMessage(code) {
if (code < 0x0020 && code !== 0x0009 && code !== 0x000a && code !== 0x000d) {
return "Cannot contain the invalid character ".concat(printCharCode(code), ".");
}
if (code === 39) {
// '
return 'Unexpected single quote character (\'), did you mean to use a double quote (")?';
}
return "Cannot parse the unexpected character ".concat(printCharCode(code), ".");
}
/**
* Reads a comment token from the source file.
*
* #[\u0009\u0020-\uFFFF]*
*/
function readComment(source, start, line, col, prev) {
var body = source.body;
var code;
var position = start;
do {
code = body.charCodeAt(++position);
} while (!isNaN(code) && ( // SourceCharacter but not LineTerminator
code > 0x001f || code === 0x0009));
return new Token(TokenKind.COMMENT, start, position, line, col, prev, body.slice(start + 1, position));
}
/**
* Reads a number token from the source file, either a float
* or an int depending on whether a decimal point appears.
*
* Int: -?(0|[1-9][0-9]*)
* Float: -?(0|[1-9][0-9]*)(\.[0-9]+)?((E|e)(+|-)?[0-9]+)?
*/
function readNumber(source, start, firstCode, line, col, prev) {
var body = source.body;
var code = firstCode;
var position = start;
var isFloat = false;
if (code === 45) {
// -
code = body.charCodeAt(++position);
}
if (code === 48) {
// 0
code = body.charCodeAt(++position);
if (code >= 48 && code <= 57) {
throw syntaxError(source, position, "Invalid number, unexpected digit after 0: ".concat(printCharCode(code), "."));
}
} else {
position = readDigits(source, position, code);
code = body.charCodeAt(position);
}
if (code === 46) {
// .
isFloat = true;
code = body.charCodeAt(++position);
position = readDigits(source, position, code);
code = body.charCodeAt(position);
}
if (code === 69 || code === 101) {
// E e
isFloat = true;
code = body.charCodeAt(++position);
if (code === 43 || code === 45) {
// + -
code = body.charCodeAt(++position);
}
position = readDigits(source, position, code);
code = body.charCodeAt(position);
} // Numbers cannot be followed by . or NameStart
if (code === 46 || isNameStart(code)) {
throw syntaxError(source, position, "Invalid number, expected digit but got: ".concat(printCharCode(code), "."));
}
return new Token(isFloat ? TokenKind.FLOAT : TokenKind.INT, start, position, line, col, prev, body.slice(start, position));
}
/**
* Returns the new position in the source after reading digits.
*/
function readDigits(source, start, firstCode) {
var body = source.body;
var position = start;
var code = firstCode;
if (code >= 48 && code <= 57) {
// 0 - 9
do {
code = body.charCodeAt(++position);
} while (code >= 48 && code <= 57); // 0 - 9
return position;
}
throw syntaxError(source, position, "Invalid number, expected digit but got: ".concat(printCharCode(code), "."));
}
/**
* Reads a string token from the source file.
*
* "([^"\\\u000A\u000D]|(\\(u[0-9a-fA-F]{4}|["\\/bfnrt])))*"
*/
function readString(source, start, line, col, prev) {
var body = source.body;
var position = start + 1;
var chunkStart = position;
var code = 0;
var value = '';
while (position < body.length && !isNaN(code = body.charCodeAt(position)) && // not LineTerminator
code !== 0x000a && code !== 0x000d) {
// Closing Quote (")
if (code === 34) {
value += body.slice(chunkStart, position);
return new Token(TokenKind.STRING, start, position + 1, line, col, prev, value);
} // SourceCharacter
if (code < 0x0020 && code !== 0x0009) {
throw syntaxError(source, position, "Invalid character within String: ".concat(printCharCode(code), "."));
}
++position;
if (code === 92) {
// \
value += body.slice(chunkStart, position - 1);
code = body.charCodeAt(position);
switch (code) {
case 34:
value += '"';
break;
case 47:
value += '/';
break;
case 92:
value += '\\';
break;
case 98:
value += '\b';
break;
case 102:
value += '\f';
break;
case 110:
value += '\n';
break;
case 114:
value += '\r';
break;
case 116:
value += '\t';
break;
case 117:
{
// uXXXX
var charCode = uniCharCode(body.charCodeAt(position + 1), body.charCodeAt(position + 2), body.charCodeAt(position + 3), body.charCodeAt(position + 4));
if (charCode < 0) {
var invalidSequence = body.slice(position + 1, position + 5);
throw syntaxError(source, position, "Invalid character escape sequence: \\u".concat(invalidSequence, "."));
}
value += String.fromCharCode(charCode);
position += 4;
break;
}
default:
throw syntaxError(source, position, "Invalid character escape sequence: \\".concat(String.fromCharCode(code), "."));
}
++position;
chunkStart = position;
}
}
throw syntaxError(source, position, 'Unterminated string.');
}
/**
* Reads a block string token from the source file.
*
* """("?"?(\\"""|\\(?!=""")|[^"\\]))*"""
*/
function readBlockString(source, start, line, col, prev, lexer) {
var body = source.body;
var position = start + 3;
var chunkStart = position;
var code = 0;
var rawValue = '';
while (position < body.length && !isNaN(code = body.charCodeAt(position))) {
// Closing Triple-Quote (""")
if (code === 34 && body.charCodeAt(position + 1) === 34 && body.charCodeAt(position + 2) === 34) {
rawValue += body.slice(chunkStart, position);
return new Token(TokenKind.BLOCK_STRING, start, position + 3, line, col, prev, dedentBlockStringValue(rawValue));
} // SourceCharacter
if (code < 0x0020 && code !== 0x0009 && code !== 0x000a && code !== 0x000d) {
throw syntaxError(source, position, "Invalid character within String: ".concat(printCharCode(code), "."));
}
if (code === 10) {
// new line
++position;
++lexer.line;
lexer.lineStart = position;
} else if (code === 13) {
// carriage return
if (body.charCodeAt(position + 1) === 10) {
position += 2;
} else {
++position;
}
++lexer.line;
lexer.lineStart = position;
} else if ( // Escape Triple-Quote (\""")
code === 92 && body.charCodeAt(position + 1) === 34 && body.charCodeAt(position + 2) === 34 && body.charCodeAt(position + 3) === 34) {
rawValue += body.slice(chunkStart, position) + '"""';
position += 4;
chunkStart = position;
} else {
++position;
}
}
throw syntaxError(source, position, 'Unterminated string.');
}
/**
* Converts four hexadecimal chars to the integer that the
* string represents. For example, uniCharCode('0','0','0','f')
* will return 15, and uniCharCode('0','0','f','f') returns 255.
*
* Returns a negative number on error, if a char was invalid.
*
* This is implemented by noting that char2hex() returns -1 on error,
 * which means the result of OR-ing the char2hex() results will also be negative.
*/
function uniCharCode(a, b, c, d) {
return char2hex(a) << 12 | char2hex(b) << 8 | char2hex(c) << 4 | char2hex(d);
}
/**
* Converts a hex character to its integer value.
* '0' becomes 0, '9' becomes 9
* 'A' becomes 10, 'F' becomes 15
* 'a' becomes 10, 'f' becomes 15
*
* Returns -1 on error.
*/
function char2hex(a) {
return a >= 48 && a <= 57 ? a - 48 // 0-9
: a >= 65 && a <= 70 ? a - 55 // A-F
: a >= 97 && a <= 102 ? a - 87 // a-f
: -1;
}
/**
* Reads an alphanumeric + underscore name from the source.
*
* [_A-Za-z][_0-9A-Za-z]*
*/
function readName(source, start, line, col, prev) {
var body = source.body;
var bodyLength = body.length;
var position = start + 1;
var code = 0;
while (position !== bodyLength && !isNaN(code = body.charCodeAt(position)) && (code === 95 || // _
code >= 48 && code <= 57 || // 0-9
code >= 65 && code <= 90 || // A-Z
code >= 97 && code <= 122) // a-z
) {
++position;
}
return new Token(TokenKind.NAME, start, position, line, col, prev, body.slice(start, position));
} // _ A-Z a-z
function isNameStart(code) {
return code === 95 || code >= 65 && code <= 90 || code >= 97 && code <= 122;
}
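
A small sketch of the Lexer contract described above: lookahead() returns the next non-ignored token without changing lexer state, advance() consumes it, and the memoization through token.next means both calls observe the same Token object. Module paths are assumptions.

// Illustrative sketch only.
import { Lexer } from './lexer';
import { Source } from './source';

const lexer = new Lexer(new Source('query Q { id }'));
const peeked = lexer.lookahead(); // Name "query"; lexer.token is still <SOF>
const first = lexer.advance();    // Name "query"; the stream has now moved
console.log(peeked === first);    // true: readToken ran once and was cached on token.next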

View File

@@ -0,0 +1,15 @@
import { Source } from './source';
/**
* Represents a location in a Source.
*/
export interface SourceLocation {
readonly line: number;
readonly column: number;
}
/**
* Takes a Source and a UTF-8 character offset, and returns the corresponding
* line and column as a SourceLocation.
*/
export function getLocation(source: Source, position: number): SourceLocation;

View File

@@ -0,0 +1,31 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.getLocation = getLocation;
/**
* Represents a location in a Source.
*/
/**
* Takes a Source and a UTF-8 character offset, and returns the corresponding
* line and column as a SourceLocation.
*/
function getLocation(source, position) {
var lineRegexp = /\r\n|[\n\r]/g;
var line = 1;
var column = position + 1;
var match;
while ((match = lineRegexp.exec(source.body)) && match.index < position) {
line += 1;
column = position + 1 - (match.index + match[0].length);
}
return {
line: line,
column: column
};
}

View File

@@ -0,0 +1,26 @@
// @flow strict
import type { Source } from './source';
/**
* Represents a location in a Source.
*/
export type SourceLocation = {|
+line: number,
+column: number,
|};
/**
* Takes a Source and a UTF-8 character offset, and returns the corresponding
* line and column as a SourceLocation.
*/
export function getLocation(source: Source, position: number): SourceLocation {
const lineRegexp = /\r\n|[\n\r]/g;
let line = 1;
let column = position + 1;
let match;
while ((match = lineRegexp.exec(source.body)) && match.index < position) {
line += 1;
column = position + 1 - (match.index + match[0].length);
}
return { line, column };
}

View File

@@ -0,0 +1,24 @@
/**
* Represents a location in a Source.
*/
/**
* Takes a Source and a UTF-8 character offset, and returns the corresponding
* line and column as a SourceLocation.
*/
export function getLocation(source, position) {
var lineRegexp = /\r\n|[\n\r]/g;
var line = 1;
var column = position + 1;
var match;
while ((match = lineRegexp.exec(source.body)) && match.index < position) {
line += 1;
column = position + 1 - (match.index + match[0].length);
}
return {
line: line,
column: column
};
}
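
A short example of getLocation, which maps a character offset to a 1-indexed line/column pair by scanning the line-terminator regex above. Module paths are assumptions.

// Illustrative sketch only.
import { Source } from './source';
import { getLocation } from './location';

const source = new Source('type Query {\n  hello: String\n}');
console.log(getLocation(source, 0));  // { line: 1, column: 1 }
console.log(getLocation(source, 15)); // { line: 2, column: 3 } (the "h" of "hello")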

View File

@@ -0,0 +1,543 @@
import { Maybe } from '../jsutils/Maybe';
import { GraphQLError } from '..';
import { TokenKindEnum } from './tokenKind';
import { Source } from './source';
import {
TypeNode,
ValueNode,
DocumentNode,
Token,
Location,
NameNode,
DirectiveDefinitionNode,
InputObjectTypeExtensionNode,
EnumTypeExtensionNode,
UnionTypeExtensionNode,
InterfaceTypeExtensionNode,
ObjectTypeExtensionNode,
ScalarTypeExtensionNode,
SchemaExtensionNode,
TypeSystemExtensionNode,
InputValueDefinitionNode,
InputObjectTypeDefinitionNode,
EnumValueDefinitionNode,
EnumTypeDefinitionNode,
NamedTypeNode,
UnionTypeDefinitionNode,
InterfaceTypeDefinitionNode,
FieldDefinitionNode,
ObjectTypeDefinitionNode,
ScalarTypeDefinitionNode,
OperationTypeDefinitionNode,
SchemaDefinitionNode,
StringValueNode,
DirectiveNode,
ObjectFieldNode,
ObjectValueNode,
FragmentSpreadNode,
InlineFragmentNode,
ArgumentNode,
FieldNode,
SelectionNode,
SelectionSetNode,
VariableNode,
VariableDefinitionNode,
OperationTypeNode,
OperationDefinitionNode,
DefinitionNode,
FragmentDefinitionNode,
ListValueNode,
} from './ast';
import { Lexer } from './lexer';
/**
* Configuration options to control parser behavior
*/
export interface ParseOptions {
/**
* By default, the parser creates AST nodes that know the location
* in the source that they correspond to. This configuration flag
* disables that behavior for performance or testing.
*/
noLocation?: boolean;
/**
   * If enabled, the parser will parse empty field sets in the Schema
* Definition Language. Otherwise, the parser will follow the current
* specification.
*
* This option is provided to ease adoption of the final SDL specification
* and will be removed in v16.
*/
allowLegacySDLEmptyFields?: boolean;
/**
* If enabled, the parser will parse implemented interfaces with no `&`
* character between each interface. Otherwise, the parser will follow the
* current specification.
*
* This option is provided to ease adoption of the final SDL specification
* and will be removed in v16.
*/
allowLegacySDLImplementsInterfaces?: boolean;
/**
* EXPERIMENTAL:
*
* If enabled, the parser will understand and parse variable definitions
* contained in a fragment definition. They'll be represented in the
* `variableDefinitions` field of the FragmentDefinitionNode.
*
* The syntax is identical to normal, query-defined variables. For example:
*
* fragment A($var: Boolean = false) on T {
* ...
* }
*
* Note: this feature is experimental and may change or be removed in the
* future.
*/
experimentalFragmentVariables?: boolean;
}
/**
* Given a GraphQL source, parses it into a Document.
* Throws GraphQLError if a syntax error is encountered.
*/
export function parse(
source: string | Source,
options?: ParseOptions,
): DocumentNode;
/**
* Given a string containing a GraphQL value, parse the AST for that value.
* Throws GraphQLError if a syntax error is encountered.
*
* This is useful within tools that operate upon GraphQL Values directly and
 * in isolation from complete GraphQL documents.
*/
export function parseValue(
source: string | Source,
options?: ParseOptions,
): ValueNode;
/**
* Given a string containing a GraphQL Type (ex. `[Int!]`), parse the AST for
* that type.
* Throws GraphQLError if a syntax error is encountered.
*
* This is useful within tools that operate upon GraphQL Types directly and
 * in isolation from complete GraphQL documents.
*
* Consider providing the results to the utility function: typeFromAST().
*/
export function parseType(
source: string | Source,
options?: ParseOptions,
): TypeNode;
export class Parser {
protected _lexer: Lexer;
protected _options?: ParseOptions;
constructor(source: string | Source, options?: ParseOptions);
/**
* Converts a name lex token into a name parse node.
*/
parseName(): NameNode;
/**
* Document : Definition+
*/
parseDocument(): DocumentNode;
/**
* Definition :
* - ExecutableDefinition
* - TypeSystemDefinition
* - TypeSystemExtension
*
* ExecutableDefinition :
* - OperationDefinition
* - FragmentDefinition
*
* TypeSystemDefinition :
* - SchemaDefinition
* - TypeDefinition
* - DirectiveDefinition
*
* TypeDefinition :
* - ScalarTypeDefinition
* - ObjectTypeDefinition
* - InterfaceTypeDefinition
* - UnionTypeDefinition
* - EnumTypeDefinition
* - InputObjectTypeDefinition
*/
parseDefinition(): DefinitionNode;
/**
* OperationDefinition :
* - SelectionSet
* - OperationType Name? VariableDefinitions? Directives? SelectionSet
*/
parseOperationDefinition(): OperationDefinitionNode;
/**
* OperationType : one of query mutation subscription
*/
parseOperationType(): OperationTypeNode;
/**
* VariableDefinitions : ( VariableDefinition+ )
*/
parseVariableDefinitions(): Array<VariableDefinitionNode>;
/**
* VariableDefinition : Variable : Type DefaultValue? Directives[Const]?
*/
parseVariableDefinition(): VariableDefinitionNode;
/**
* Variable : $ Name
*/
parseVariable(): VariableNode;
/**
* ```
* SelectionSet : { Selection+ }
* ```
*/
parseSelectionSet(): SelectionSetNode;
/**
* Selection :
* - Field
* - FragmentSpread
* - InlineFragment
*/
parseSelection(): SelectionNode;
/**
* Field : Alias? Name Arguments? Directives? SelectionSet?
*
* Alias : Name :
*/
parseField(): FieldNode;
/**
* Arguments[Const] : ( Argument[?Const]+ )
*/
parseArguments(): Array<ArgumentNode>;
/**
* Argument[Const] : Name : Value[?Const]
*/
parseArgument(): ArgumentNode;
/**
* Corresponds to both FragmentSpread and InlineFragment in the spec.
*
* FragmentSpread : ... FragmentName Directives?
*
* InlineFragment : ... TypeCondition? Directives? SelectionSet
*/
parseFragment(): FragmentSpreadNode | InlineFragmentNode;
/**
* FragmentDefinition :
* - fragment FragmentName on TypeCondition Directives? SelectionSet
*
* TypeCondition : NamedType
*/
parseFragmentDefinition(): FragmentDefinitionNode;
/**
* FragmentName : Name but not `on`
*/
parseFragmentName(): NameNode;
/**
* Value[Const] :
* - [~Const] Variable
* - IntValue
* - FloatValue
* - StringValue
* - BooleanValue
* - NullValue
* - EnumValue
* - ListValue[?Const]
* - ObjectValue[?Const]
*
* BooleanValue : one of `true` `false`
*
* NullValue : `null`
*
* EnumValue : Name but not `true`, `false` or `null`
*/
parseValueLiteral(): ValueNode;
parseStringLiteral(): StringValueNode;
/**
* ListValue[Const] :
* - [ ]
* - [ Value[?Const]+ ]
*/
parseList(): ListValueNode;
/**
* ```
* ObjectValue[Const] :
* - { }
* - { ObjectField[?Const]+ }
* ```
*/
parseObject(isConst: boolean): ObjectValueNode;
/**
* ObjectField[Const] : Name : Value[?Const]
*/
  parseObjectField(): ObjectFieldNode;
/**
* Directives[Const] : Directive[?Const]+
*/
parseDirectives(): Array<DirectiveNode>;
/**
* ```
* Directive[Const] : @ Name Arguments[?Const]?
* ```
*/
parseDirective(): DirectiveNode;
/**
* Type :
* - NamedType
* - ListType
* - NonNullType
*/
parseTypeReference(): TypeNode;
/**
* NamedType : Name
*/
parseNamedType(): NamedTypeNode;
peekDescription(): boolean;
/**
* Description : StringValue
*/
parseDescription(): undefined | StringValueNode;
/**
* ```
* SchemaDefinition : Description? schema Directives[Const]? { OperationTypeDefinition+ }
* ```
*/
parseSchemaDefinition(): SchemaDefinitionNode;
/**
* OperationTypeDefinition : OperationType : NamedType
*/
parseOperationTypeDefinition(): OperationTypeDefinitionNode;
/**
* ScalarTypeDefinition : Description? scalar Name Directives[Const]?
*/
parseScalarTypeDefinition(): ScalarTypeDefinitionNode;
/**
* ObjectTypeDefinition :
* Description?
* type Name ImplementsInterfaces? Directives[Const]? FieldsDefinition?
*/
parseObjectTypeDefinition(): ObjectTypeDefinitionNode;
/**
* ImplementsInterfaces :
* - implements `&`? NamedType
* - ImplementsInterfaces & NamedType
*/
parseImplementsInterfaces(): Array<NamedTypeNode>;
/**
* ```
* FieldsDefinition : { FieldDefinition+ }
* ```
*/
parseFieldsDefinition(): Array<FieldDefinitionNode>;
/**
* FieldDefinition :
* - Description? Name ArgumentsDefinition? : Type Directives[Const]?
*/
parseFieldDefinition(): FieldDefinitionNode;
/**
* ArgumentsDefinition : ( InputValueDefinition+ )
*/
parseArgumentDefs(): Array<InputValueDefinitionNode>;
/**
* InputValueDefinition :
* - Description? Name : Type DefaultValue? Directives[Const]?
*/
parseInputValueDef(): InputValueDefinitionNode;
/**
* InterfaceTypeDefinition :
* - Description? interface Name Directives[Const]? FieldsDefinition?
*/
parseInterfaceTypeDefinition(): InterfaceTypeDefinitionNode;
/**
* UnionTypeDefinition :
* - Description? union Name Directives[Const]? UnionMemberTypes?
*/
parseUnionTypeDefinition(): UnionTypeDefinitionNode;
/**
* UnionMemberTypes :
* - = `|`? NamedType
* - UnionMemberTypes | NamedType
*/
parseUnionMemberTypes(): Array<NamedTypeNode>;
/**
* EnumTypeDefinition :
* - Description? enum Name Directives[Const]? EnumValuesDefinition?
*/
parseEnumTypeDefinition(): EnumTypeDefinitionNode;
/**
* ```
* EnumValuesDefinition : { EnumValueDefinition+ }
* ```
*/
parseEnumValuesDefinition(): Array<EnumValueDefinitionNode>;
/**
* EnumValueDefinition : Description? EnumValue Directives[Const]?
*/
parseEnumValueDefinition(): EnumValueDefinitionNode;
/**
* EnumValue : Name but not `true`, `false` or `null`
*/
parseEnumValueName(): NameNode;
/**
* InputObjectTypeDefinition :
* - Description? input Name Directives[Const]? InputFieldsDefinition?
*/
parseInputObjectTypeDefinition(): InputObjectTypeDefinitionNode;
/**
* ```
* InputFieldsDefinition : { InputValueDefinition+ }
* ```
*/
parseInputFieldsDefinition(): Array<InputValueDefinitionNode>;
/**
* TypeSystemExtension :
* - SchemaExtension
* - TypeExtension
*
* TypeExtension :
* - ScalarTypeExtension
* - ObjectTypeExtension
* - InterfaceTypeExtension
* - UnionTypeExtension
* - EnumTypeExtension
* - InputObjectTypeDefinition
*/
parseTypeSystemExtension(): TypeSystemExtensionNode;
/**
* ```
* SchemaExtension :
* - extend schema Directives[Const]? { OperationTypeDefinition+ }
* - extend schema Directives[Const]
* ```
*/
parseSchemaExtension(): SchemaExtensionNode;
/**
* ScalarTypeExtension :
* - extend scalar Name Directives[Const]
*/
parseScalarTypeExtension(): ScalarTypeExtensionNode;
/**
* ObjectTypeExtension :
* - extend type Name ImplementsInterfaces? Directives[Const]? FieldsDefinition
* - extend type Name ImplementsInterfaces? Directives[Const]
* - extend type Name ImplementsInterfaces
*/
parseObjectTypeExtension(): ObjectTypeExtensionNode;
/**
* InterfaceTypeExtension :
* - extend interface Name ImplementsInterfaces? Directives[Const]? FieldsDefinition
* - extend interface Name ImplementsInterfaces? Directives[Const]
* - extend interface Name ImplementsInterfaces
*/
parseInterfaceTypeExtension(): InterfaceTypeExtensionNode;
/**
* UnionTypeExtension :
* - extend union Name Directives[Const]? UnionMemberTypes
* - extend union Name Directives[Const]
*/
parseUnionTypeExtension(): UnionTypeExtensionNode;
/**
* EnumTypeExtension :
* - extend enum Name Directives[Const]? EnumValuesDefinition
* - extend enum Name Directives[Const]
*/
parseEnumTypeExtension(): EnumTypeExtensionNode;
/**
* InputObjectTypeExtension :
* - extend input Name Directives[Const]? InputFieldsDefinition
* - extend input Name Directives[Const]
*/
parseInputObjectTypeExtension(): InputObjectTypeExtensionNode;
/**
* ```
* DirectiveDefinition :
* - Description? directive @ Name ArgumentsDefinition? `repeatable`? on DirectiveLocations
* ```
*/
parseDirectiveDefinition(): DirectiveDefinitionNode;
/**
* DirectiveLocations :
* - `|`? DirectiveLocation
* - DirectiveLocations | DirectiveLocation
*/
parseDirectiveLocations(): Array<NameNode>;
parseDirectiveLocation(): NameNode;
/**
* Returns a location object, used to identify the place in the source that created a given parsed object.
*/
loc(startToken: Token): Location | undefined;
/**
* Determines if the next token is of a given kind
*/
peek(kind: TokenKindEnum): boolean;
/**
* If the next token is of the given kind, return that token after advancing the lexer.
* Otherwise, do not change the parser state and throw an error.
*/
expectToken(kind: TokenKindEnum): Token;
/**
* If the next token is of the given kind, return "true" after advancing the lexer.
* Otherwise, do not change the parser state and return "false".
*/
expectOptionalToken(kind: TokenKindEnum): boolean;
/**
* If the next token is a given keyword, advance the lexer.
* Otherwise, do not change the parser state and throw an error.
*/
expectKeyword(value: string): void;
/**
* If the next token is a given keyword, return "true" after advancing the lexer.
* Otherwise, do not change the parser state and return "false".
*/
expectOptionalKeyword(value: string): boolean;
/**
* Helper function for creating an error when an unexpected lexed token is encountered.
*/
unexpected(atToken?: Maybe<Token>): GraphQLError;
/**
* Returns a possibly empty list of parse nodes, determined by the parseFn.
* This list begins with a lex token of openKind and ends with a lex token of closeKind.
* Advances the parser to the next lex token after the closing token.
*/
any<T>(
openKind: TokenKindEnum,
parseFn: () => T,
closeKind: TokenKindEnum,
): Array<T>;
/**
* Returns a list of parse nodes, determined by the parseFn.
   * It can be empty only if the open token is missing; otherwise it will always return a
   * non-empty list that begins with a lex token of openKind and ends with a lex token of closeKind.
* Advances the parser to the next lex token after the closing token.
*/
optionalMany<T>(
openKind: TokenKindEnum,
parseFn: () => T,
closeKind: TokenKindEnum,
): Array<T>;
/**
* Returns a non-empty list of parse nodes, determined by the parseFn.
* This list begins with a lex token of openKind and ends with a lex token of closeKind.
* Advances the parser to the next lex token after the closing token.
*/
many<T>(
openKind: TokenKindEnum,
parseFn: () => T,
closeKind: TokenKindEnum,
): Array<T>;
/**
* Returns a non-empty list of parse nodes, determined by the parseFn.
   * This list may begin with a lex token of delimiterKind followed by items separated by lex tokens of delimiterKind.
* Advances the parser to the next lex token after last item in the list.
*/
delimitedMany<T>(delimiterKind: TokenKindEnum, parseFn: () => T): Array<T>;
}
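
A usage sketch for the three entry points declared above, including the noLocation flag from ParseOptions; the kind strings in the comments follow the Kind constants used elsewhere in this commit.

// Illustrative sketch only.
import { parse, parseValue, parseType } from './parser';

const doc = parse('query Q($id: ID!) { node(id: $id) { id } }', { noLocation: true });
console.log(doc.definitions[0].kind); // 'OperationDefinition'

const value = parseValue('{ enabled: true, tags: ["a", "b"] }');
console.log(value.kind);              // 'ObjectValue'

const type = parseType('[Int!]');
console.log(type.kind);               // 'ListType'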

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,36 @@
import {
ASTNode,
DefinitionNode,
ExecutableDefinitionNode,
SelectionNode,
ValueNode,
TypeNode,
TypeSystemDefinitionNode,
TypeDefinitionNode,
TypeSystemExtensionNode,
TypeExtensionNode,
} from './ast';
export function isDefinitionNode(node: ASTNode): node is DefinitionNode;
export function isExecutableDefinitionNode(
node: ASTNode,
): node is ExecutableDefinitionNode;
export function isSelectionNode(node: ASTNode): node is SelectionNode;
export function isValueNode(node: ASTNode): node is ValueNode;
export function isTypeNode(node: ASTNode): node is TypeNode;
export function isTypeSystemDefinitionNode(
node: ASTNode,
): node is TypeSystemDefinitionNode;
export function isTypeDefinitionNode(node: ASTNode): node is TypeDefinitionNode;
export function isTypeSystemExtensionNode(
node: ASTNode,
): node is TypeSystemExtensionNode;
export function isTypeExtensionNode(node: ASTNode): node is TypeExtensionNode;

View File

@@ -0,0 +1,52 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.isDefinitionNode = isDefinitionNode;
exports.isExecutableDefinitionNode = isExecutableDefinitionNode;
exports.isSelectionNode = isSelectionNode;
exports.isValueNode = isValueNode;
exports.isTypeNode = isTypeNode;
exports.isTypeSystemDefinitionNode = isTypeSystemDefinitionNode;
exports.isTypeDefinitionNode = isTypeDefinitionNode;
exports.isTypeSystemExtensionNode = isTypeSystemExtensionNode;
exports.isTypeExtensionNode = isTypeExtensionNode;
var _kinds = require("./kinds.js");
function isDefinitionNode(node) {
return isExecutableDefinitionNode(node) || isTypeSystemDefinitionNode(node) || isTypeSystemExtensionNode(node);
}
function isExecutableDefinitionNode(node) {
return node.kind === _kinds.Kind.OPERATION_DEFINITION || node.kind === _kinds.Kind.FRAGMENT_DEFINITION;
}
function isSelectionNode(node) {
return node.kind === _kinds.Kind.FIELD || node.kind === _kinds.Kind.FRAGMENT_SPREAD || node.kind === _kinds.Kind.INLINE_FRAGMENT;
}
function isValueNode(node) {
return node.kind === _kinds.Kind.VARIABLE || node.kind === _kinds.Kind.INT || node.kind === _kinds.Kind.FLOAT || node.kind === _kinds.Kind.STRING || node.kind === _kinds.Kind.BOOLEAN || node.kind === _kinds.Kind.NULL || node.kind === _kinds.Kind.ENUM || node.kind === _kinds.Kind.LIST || node.kind === _kinds.Kind.OBJECT;
}
function isTypeNode(node) {
return node.kind === _kinds.Kind.NAMED_TYPE || node.kind === _kinds.Kind.LIST_TYPE || node.kind === _kinds.Kind.NON_NULL_TYPE;
}
function isTypeSystemDefinitionNode(node) {
return node.kind === _kinds.Kind.SCHEMA_DEFINITION || isTypeDefinitionNode(node) || node.kind === _kinds.Kind.DIRECTIVE_DEFINITION;
}
function isTypeDefinitionNode(node) {
return node.kind === _kinds.Kind.SCALAR_TYPE_DEFINITION || node.kind === _kinds.Kind.OBJECT_TYPE_DEFINITION || node.kind === _kinds.Kind.INTERFACE_TYPE_DEFINITION || node.kind === _kinds.Kind.UNION_TYPE_DEFINITION || node.kind === _kinds.Kind.ENUM_TYPE_DEFINITION || node.kind === _kinds.Kind.INPUT_OBJECT_TYPE_DEFINITION;
}
function isTypeSystemExtensionNode(node) {
return node.kind === _kinds.Kind.SCHEMA_EXTENSION || isTypeExtensionNode(node);
}
function isTypeExtensionNode(node) {
return node.kind === _kinds.Kind.SCALAR_TYPE_EXTENSION || node.kind === _kinds.Kind.OBJECT_TYPE_EXTENSION || node.kind === _kinds.Kind.INTERFACE_TYPE_EXTENSION || node.kind === _kinds.Kind.UNION_TYPE_EXTENSION || node.kind === _kinds.Kind.ENUM_TYPE_EXTENSION || node.kind === _kinds.Kind.INPUT_OBJECT_TYPE_EXTENSION;
}

View File

@@ -0,0 +1,82 @@
// @flow strict
import type { ASTNode } from './ast';
import { Kind } from './kinds';
export function isDefinitionNode(node: ASTNode): boolean %checks {
return (
isExecutableDefinitionNode(node) ||
isTypeSystemDefinitionNode(node) ||
isTypeSystemExtensionNode(node)
);
}
export function isExecutableDefinitionNode(node: ASTNode): boolean %checks {
return (
node.kind === Kind.OPERATION_DEFINITION ||
node.kind === Kind.FRAGMENT_DEFINITION
);
}
export function isSelectionNode(node: ASTNode): boolean %checks {
return (
node.kind === Kind.FIELD ||
node.kind === Kind.FRAGMENT_SPREAD ||
node.kind === Kind.INLINE_FRAGMENT
);
}
export function isValueNode(node: ASTNode): boolean %checks {
return (
node.kind === Kind.VARIABLE ||
node.kind === Kind.INT ||
node.kind === Kind.FLOAT ||
node.kind === Kind.STRING ||
node.kind === Kind.BOOLEAN ||
node.kind === Kind.NULL ||
node.kind === Kind.ENUM ||
node.kind === Kind.LIST ||
node.kind === Kind.OBJECT
);
}
export function isTypeNode(node: ASTNode): boolean %checks {
return (
node.kind === Kind.NAMED_TYPE ||
node.kind === Kind.LIST_TYPE ||
node.kind === Kind.NON_NULL_TYPE
);
}
export function isTypeSystemDefinitionNode(node: ASTNode): boolean %checks {
return (
node.kind === Kind.SCHEMA_DEFINITION ||
isTypeDefinitionNode(node) ||
node.kind === Kind.DIRECTIVE_DEFINITION
);
}
export function isTypeDefinitionNode(node: ASTNode): boolean %checks {
return (
node.kind === Kind.SCALAR_TYPE_DEFINITION ||
node.kind === Kind.OBJECT_TYPE_DEFINITION ||
node.kind === Kind.INTERFACE_TYPE_DEFINITION ||
node.kind === Kind.UNION_TYPE_DEFINITION ||
node.kind === Kind.ENUM_TYPE_DEFINITION ||
node.kind === Kind.INPUT_OBJECT_TYPE_DEFINITION
);
}
export function isTypeSystemExtensionNode(node: ASTNode): boolean %checks {
return node.kind === Kind.SCHEMA_EXTENSION || isTypeExtensionNode(node);
}
export function isTypeExtensionNode(node: ASTNode): boolean %checks {
return (
node.kind === Kind.SCALAR_TYPE_EXTENSION ||
node.kind === Kind.OBJECT_TYPE_EXTENSION ||
node.kind === Kind.INTERFACE_TYPE_EXTENSION ||
node.kind === Kind.UNION_TYPE_EXTENSION ||
node.kind === Kind.ENUM_TYPE_EXTENSION ||
node.kind === Kind.INPUT_OBJECT_TYPE_EXTENSION
);
}

View File

@@ -0,0 +1,28 @@
import { Kind } from "./kinds.mjs";
export function isDefinitionNode(node) {
return isExecutableDefinitionNode(node) || isTypeSystemDefinitionNode(node) || isTypeSystemExtensionNode(node);
}
export function isExecutableDefinitionNode(node) {
return node.kind === Kind.OPERATION_DEFINITION || node.kind === Kind.FRAGMENT_DEFINITION;
}
export function isSelectionNode(node) {
return node.kind === Kind.FIELD || node.kind === Kind.FRAGMENT_SPREAD || node.kind === Kind.INLINE_FRAGMENT;
}
export function isValueNode(node) {
return node.kind === Kind.VARIABLE || node.kind === Kind.INT || node.kind === Kind.FLOAT || node.kind === Kind.STRING || node.kind === Kind.BOOLEAN || node.kind === Kind.NULL || node.kind === Kind.ENUM || node.kind === Kind.LIST || node.kind === Kind.OBJECT;
}
export function isTypeNode(node) {
return node.kind === Kind.NAMED_TYPE || node.kind === Kind.LIST_TYPE || node.kind === Kind.NON_NULL_TYPE;
}
export function isTypeSystemDefinitionNode(node) {
return node.kind === Kind.SCHEMA_DEFINITION || isTypeDefinitionNode(node) || node.kind === Kind.DIRECTIVE_DEFINITION;
}
export function isTypeDefinitionNode(node) {
return node.kind === Kind.SCALAR_TYPE_DEFINITION || node.kind === Kind.OBJECT_TYPE_DEFINITION || node.kind === Kind.INTERFACE_TYPE_DEFINITION || node.kind === Kind.UNION_TYPE_DEFINITION || node.kind === Kind.ENUM_TYPE_DEFINITION || node.kind === Kind.INPUT_OBJECT_TYPE_DEFINITION;
}
export function isTypeSystemExtensionNode(node) {
return node.kind === Kind.SCHEMA_EXTENSION || isTypeExtensionNode(node);
}
export function isTypeExtensionNode(node) {
return node.kind === Kind.SCALAR_TYPE_EXTENSION || node.kind === Kind.OBJECT_TYPE_EXTENSION || node.kind === Kind.INTERFACE_TYPE_EXTENSION || node.kind === Kind.UNION_TYPE_EXTENSION || node.kind === Kind.ENUM_TYPE_EXTENSION || node.kind === Kind.INPUT_OBJECT_TYPE_EXTENSION;
}
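
A small sketch showing the predicates above partitioning a parsed document into executable and type-system definitions. Module paths are assumptions.

// Illustrative sketch only.
import { parse } from './parser';
import { isExecutableDefinitionNode, isTypeSystemDefinitionNode } from './predicates';

const doc = parse('type Query { hello: String } query Q { hello }');
const executable = doc.definitions.filter(isExecutableDefinitionNode);
const typeSystem = doc.definitions.filter(isTypeSystemDefinitionNode);
console.log(executable.length, typeSystem.length); // 1 1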

View File

@@ -0,0 +1,16 @@
import { Location } from './ast';
import { Source } from './source';
import { SourceLocation } from './location';
/**
* Render a helpful description of the location in the GraphQL Source document.
*/
export function printLocation(location: Location): string;
/**
* Render a helpful description of the location in the GraphQL Source document.
*/
export function printSourceLocation(
source: Source,
sourceLocation: SourceLocation,
): string;

View File

@@ -0,0 +1,75 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.printLocation = printLocation;
exports.printSourceLocation = printSourceLocation;
var _location = require("./location.js");
/**
* Render a helpful description of the location in the GraphQL Source document.
*/
function printLocation(location) {
return printSourceLocation(location.source, (0, _location.getLocation)(location.source, location.start));
}
/**
* Render a helpful description of the location in the GraphQL Source document.
*/
function printSourceLocation(source, sourceLocation) {
var firstLineColumnOffset = source.locationOffset.column - 1;
var body = whitespace(firstLineColumnOffset) + source.body;
var lineIndex = sourceLocation.line - 1;
var lineOffset = source.locationOffset.line - 1;
var lineNum = sourceLocation.line + lineOffset;
var columnOffset = sourceLocation.line === 1 ? firstLineColumnOffset : 0;
var columnNum = sourceLocation.column + columnOffset;
var locationStr = "".concat(source.name, ":").concat(lineNum, ":").concat(columnNum, "\n");
var lines = body.split(/\r\n|[\n\r]/g);
var locationLine = lines[lineIndex]; // Special case for minified documents
if (locationLine.length > 120) {
var subLineIndex = Math.floor(columnNum / 80);
var subLineColumnNum = columnNum % 80;
var subLines = [];
for (var i = 0; i < locationLine.length; i += 80) {
subLines.push(locationLine.slice(i, i + 80));
}
return locationStr + printPrefixedLines([["".concat(lineNum), subLines[0]]].concat(subLines.slice(1, subLineIndex + 1).map(function (subLine) {
return ['', subLine];
}), [[' ', whitespace(subLineColumnNum - 1) + '^'], ['', subLines[subLineIndex + 1]]]));
}
return locationStr + printPrefixedLines([// Lines specified like this: ["prefix", "string"],
["".concat(lineNum - 1), lines[lineIndex - 1]], ["".concat(lineNum), locationLine], ['', whitespace(columnNum - 1) + '^'], ["".concat(lineNum + 1), lines[lineIndex + 1]]]);
}
function printPrefixedLines(lines) {
var existingLines = lines.filter(function (_ref) {
var _ = _ref[0],
line = _ref[1];
return line !== undefined;
});
var padLen = Math.max.apply(Math, existingLines.map(function (_ref2) {
var prefix = _ref2[0];
return prefix.length;
}));
return existingLines.map(function (_ref3) {
var prefix = _ref3[0],
line = _ref3[1];
return leftPad(padLen, prefix) + (line ? ' | ' + line : ' |');
}).join('\n');
}
function whitespace(len) {
return Array(len + 1).join(' ');
}
function leftPad(len, str) {
return whitespace(len - str.length) + str;
}

View File

@@ -0,0 +1,88 @@
// @flow strict
import type { Source } from './source';
import type { Location } from './ast';
import type { SourceLocation } from './location';
import { getLocation } from './location';
/**
* Render a helpful description of the location in the GraphQL Source document.
*/
export function printLocation(location: Location): string {
return printSourceLocation(
location.source,
getLocation(location.source, location.start),
);
}
/**
* Render a helpful description of the location in the GraphQL Source document.
*/
export function printSourceLocation(
source: Source,
sourceLocation: SourceLocation,
): string {
const firstLineColumnOffset = source.locationOffset.column - 1;
const body = whitespace(firstLineColumnOffset) + source.body;
const lineIndex = sourceLocation.line - 1;
const lineOffset = source.locationOffset.line - 1;
const lineNum = sourceLocation.line + lineOffset;
const columnOffset = sourceLocation.line === 1 ? firstLineColumnOffset : 0;
const columnNum = sourceLocation.column + columnOffset;
const locationStr = `${source.name}:${lineNum}:${columnNum}\n`;
const lines = body.split(/\r\n|[\n\r]/g);
const locationLine = lines[lineIndex];
// Special case for minified documents
if (locationLine.length > 120) {
const subLineIndex = Math.floor(columnNum / 80);
const subLineColumnNum = columnNum % 80;
const subLines = [];
for (let i = 0; i < locationLine.length; i += 80) {
subLines.push(locationLine.slice(i, i + 80));
}
return (
locationStr +
printPrefixedLines([
[`${lineNum}`, subLines[0]],
...subLines.slice(1, subLineIndex + 1).map((subLine) => ['', subLine]),
[' ', whitespace(subLineColumnNum - 1) + '^'],
['', subLines[subLineIndex + 1]],
])
);
}
return (
locationStr +
printPrefixedLines([
// Lines specified like this: ["prefix", "string"],
[`${lineNum - 1}`, lines[lineIndex - 1]],
[`${lineNum}`, locationLine],
['', whitespace(columnNum - 1) + '^'],
[`${lineNum + 1}`, lines[lineIndex + 1]],
])
);
}
function printPrefixedLines(lines: $ReadOnlyArray<[string, string]>): string {
const existingLines = lines.filter(([_, line]) => line !== undefined);
const padLen = Math.max(...existingLines.map(([prefix]) => prefix.length));
return existingLines
.map(
([prefix, line]) =>
leftPad(padLen, prefix) + (line ? ' | ' + line : ' |'),
)
.join('\n');
}
function whitespace(len: number): string {
return Array(len + 1).join(' ');
}
function leftPad(len: number, str: string): string {
return whitespace(len - str.length) + str;
}

View File

@@ -0,0 +1,66 @@
import { getLocation } from "./location.mjs";
/**
* Render a helpful description of the location in the GraphQL Source document.
*/
export function printLocation(location) {
return printSourceLocation(location.source, getLocation(location.source, location.start));
}
/**
* Render a helpful description of the location in the GraphQL Source document.
*/
export function printSourceLocation(source, sourceLocation) {
var firstLineColumnOffset = source.locationOffset.column - 1;
var body = whitespace(firstLineColumnOffset) + source.body;
var lineIndex = sourceLocation.line - 1;
var lineOffset = source.locationOffset.line - 1;
var lineNum = sourceLocation.line + lineOffset;
var columnOffset = sourceLocation.line === 1 ? firstLineColumnOffset : 0;
var columnNum = sourceLocation.column + columnOffset;
var locationStr = "".concat(source.name, ":").concat(lineNum, ":").concat(columnNum, "\n");
var lines = body.split(/\r\n|[\n\r]/g);
var locationLine = lines[lineIndex]; // Special case for minified documents
if (locationLine.length > 120) {
var subLineIndex = Math.floor(columnNum / 80);
var subLineColumnNum = columnNum % 80;
var subLines = [];
for (var i = 0; i < locationLine.length; i += 80) {
subLines.push(locationLine.slice(i, i + 80));
}
return locationStr + printPrefixedLines([["".concat(lineNum), subLines[0]]].concat(subLines.slice(1, subLineIndex + 1).map(function (subLine) {
return ['', subLine];
}), [[' ', whitespace(subLineColumnNum - 1) + '^'], ['', subLines[subLineIndex + 1]]]));
}
return locationStr + printPrefixedLines([// Lines specified like this: ["prefix", "string"],
["".concat(lineNum - 1), lines[lineIndex - 1]], ["".concat(lineNum), locationLine], ['', whitespace(columnNum - 1) + '^'], ["".concat(lineNum + 1), lines[lineIndex + 1]]]);
}
function printPrefixedLines(lines) {
var existingLines = lines.filter(function (_ref) {
var _ = _ref[0],
line = _ref[1];
return line !== undefined;
});
var padLen = Math.max.apply(Math, existingLines.map(function (_ref2) {
var prefix = _ref2[0];
return prefix.length;
}));
return existingLines.map(function (_ref3) {
var prefix = _ref3[0],
line = _ref3[1];
return leftPad(padLen, prefix) + (line ? ' | ' + line : ' |');
}).join('\n');
}
function whitespace(len) {
return Array(len + 1).join(' ');
}
function leftPad(len, str) {
return whitespace(len - str.length) + str;
}
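
A sketch of printSourceLocation, which renders a file:line:column header plus the offending line with a caret under the reported column, as implemented above. The two-argument Source constructor (body, name) is an assumption based on the rest of this commit.

// Illustrative sketch only.
import { Source } from './source';
import { getLocation } from './location';
import { printSourceLocation } from './printLocation';

const source = new Source('{\n  field(arg:: 1)\n}', 'query.graphql');
// Prints "query.graphql:2:13" followed by the neighbouring lines and a "^"
// caret under column 13 of line 2 (the second, stray colon).
console.log(printSourceLocation(source, getLocation(source, 14)));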

View File

@@ -0,0 +1,7 @@
import { ASTNode } from './ast';
/**
* Converts an AST into a string, using one set of reasonable
* formatting rules.
*/
export function print(ast: ASTNode): string;
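
A round-trip sketch for print(), which re-serializes a parsed AST using the formatting rules implemented in the reducer that follows. Module paths are assumptions.

// Illustrative sketch only.
import { parse } from './parser';
import { print } from './printer';

const ast = parse('query Q { user(id: 4) { name } }');
console.log(print(ast));
// query Q {
//   user(id: 4) {
//     name
//   }
// }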

View File

@@ -0,0 +1,322 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.print = print;
var _visitor = require("./visitor.js");
var _blockString = require("./blockString.js");
/**
* Converts an AST into a string, using one set of reasonable
* formatting rules.
*/
function print(ast) {
return (0, _visitor.visit)(ast, {
leave: printDocASTReducer
});
}
var MAX_LINE_LENGTH = 80; // TODO: provide better type coverage in future
var printDocASTReducer = {
Name: function Name(node) {
return node.value;
},
Variable: function Variable(node) {
return '$' + node.name;
},
// Document
Document: function Document(node) {
return join(node.definitions, '\n\n') + '\n';
},
OperationDefinition: function OperationDefinition(node) {
var op = node.operation;
var name = node.name;
var varDefs = wrap('(', join(node.variableDefinitions, ', '), ')');
var directives = join(node.directives, ' ');
var selectionSet = node.selectionSet; // Anonymous queries with no directives or variable definitions can use
// the query short form.
return !name && !directives && !varDefs && op === 'query' ? selectionSet : join([op, join([name, varDefs]), directives, selectionSet], ' ');
},
VariableDefinition: function VariableDefinition(_ref) {
var variable = _ref.variable,
type = _ref.type,
defaultValue = _ref.defaultValue,
directives = _ref.directives;
return variable + ': ' + type + wrap(' = ', defaultValue) + wrap(' ', join(directives, ' '));
},
SelectionSet: function SelectionSet(_ref2) {
var selections = _ref2.selections;
return block(selections);
},
Field: function Field(_ref3) {
var alias = _ref3.alias,
name = _ref3.name,
args = _ref3.arguments,
directives = _ref3.directives,
selectionSet = _ref3.selectionSet;
var prefix = wrap('', alias, ': ') + name;
var argsLine = prefix + wrap('(', join(args, ', '), ')');
if (argsLine.length > MAX_LINE_LENGTH) {
argsLine = prefix + wrap('(\n', indent(join(args, '\n')), '\n)');
}
return join([argsLine, join(directives, ' '), selectionSet], ' ');
},
Argument: function Argument(_ref4) {
var name = _ref4.name,
value = _ref4.value;
return name + ': ' + value;
},
// Fragments
FragmentSpread: function FragmentSpread(_ref5) {
var name = _ref5.name,
directives = _ref5.directives;
return '...' + name + wrap(' ', join(directives, ' '));
},
InlineFragment: function InlineFragment(_ref6) {
var typeCondition = _ref6.typeCondition,
directives = _ref6.directives,
selectionSet = _ref6.selectionSet;
return join(['...', wrap('on ', typeCondition), join(directives, ' '), selectionSet], ' ');
},
FragmentDefinition: function FragmentDefinition(_ref7) {
var name = _ref7.name,
typeCondition = _ref7.typeCondition,
variableDefinitions = _ref7.variableDefinitions,
directives = _ref7.directives,
selectionSet = _ref7.selectionSet;
return (// Note: fragment variable definitions are experimental and may be changed
// or removed in the future.
"fragment ".concat(name).concat(wrap('(', join(variableDefinitions, ', '), ')'), " ") + "on ".concat(typeCondition, " ").concat(wrap('', join(directives, ' '), ' ')) + selectionSet
);
},
// Value
IntValue: function IntValue(_ref8) {
var value = _ref8.value;
return value;
},
FloatValue: function FloatValue(_ref9) {
var value = _ref9.value;
return value;
},
StringValue: function StringValue(_ref10, key) {
var value = _ref10.value,
isBlockString = _ref10.block;
return isBlockString ? (0, _blockString.printBlockString)(value, key === 'description' ? '' : ' ') : JSON.stringify(value);
},
BooleanValue: function BooleanValue(_ref11) {
var value = _ref11.value;
return value ? 'true' : 'false';
},
NullValue: function NullValue() {
return 'null';
},
EnumValue: function EnumValue(_ref12) {
var value = _ref12.value;
return value;
},
ListValue: function ListValue(_ref13) {
var values = _ref13.values;
return '[' + join(values, ', ') + ']';
},
ObjectValue: function ObjectValue(_ref14) {
var fields = _ref14.fields;
return '{' + join(fields, ', ') + '}';
},
ObjectField: function ObjectField(_ref15) {
var name = _ref15.name,
value = _ref15.value;
return name + ': ' + value;
},
// Directive
Directive: function Directive(_ref16) {
var name = _ref16.name,
args = _ref16.arguments;
return '@' + name + wrap('(', join(args, ', '), ')');
},
// Type
NamedType: function NamedType(_ref17) {
var name = _ref17.name;
return name;
},
ListType: function ListType(_ref18) {
var type = _ref18.type;
return '[' + type + ']';
},
NonNullType: function NonNullType(_ref19) {
var type = _ref19.type;
return type + '!';
},
// Type System Definitions
SchemaDefinition: addDescription(function (_ref20) {
var directives = _ref20.directives,
operationTypes = _ref20.operationTypes;
return join(['schema', join(directives, ' '), block(operationTypes)], ' ');
}),
OperationTypeDefinition: function OperationTypeDefinition(_ref21) {
var operation = _ref21.operation,
type = _ref21.type;
return operation + ': ' + type;
},
ScalarTypeDefinition: addDescription(function (_ref22) {
var name = _ref22.name,
directives = _ref22.directives;
return join(['scalar', name, join(directives, ' ')], ' ');
}),
ObjectTypeDefinition: addDescription(function (_ref23) {
var name = _ref23.name,
interfaces = _ref23.interfaces,
directives = _ref23.directives,
fields = _ref23.fields;
return join(['type', name, wrap('implements ', join(interfaces, ' & ')), join(directives, ' '), block(fields)], ' ');
}),
FieldDefinition: addDescription(function (_ref24) {
var name = _ref24.name,
args = _ref24.arguments,
type = _ref24.type,
directives = _ref24.directives;
return name + (hasMultilineItems(args) ? wrap('(\n', indent(join(args, '\n')), '\n)') : wrap('(', join(args, ', '), ')')) + ': ' + type + wrap(' ', join(directives, ' '));
}),
InputValueDefinition: addDescription(function (_ref25) {
var name = _ref25.name,
type = _ref25.type,
defaultValue = _ref25.defaultValue,
directives = _ref25.directives;
return join([name + ': ' + type, wrap('= ', defaultValue), join(directives, ' ')], ' ');
}),
InterfaceTypeDefinition: addDescription(function (_ref26) {
var name = _ref26.name,
interfaces = _ref26.interfaces,
directives = _ref26.directives,
fields = _ref26.fields;
return join(['interface', name, wrap('implements ', join(interfaces, ' & ')), join(directives, ' '), block(fields)], ' ');
}),
UnionTypeDefinition: addDescription(function (_ref27) {
var name = _ref27.name,
directives = _ref27.directives,
types = _ref27.types;
return join(['union', name, join(directives, ' '), types && types.length !== 0 ? '= ' + join(types, ' | ') : ''], ' ');
}),
EnumTypeDefinition: addDescription(function (_ref28) {
var name = _ref28.name,
directives = _ref28.directives,
values = _ref28.values;
return join(['enum', name, join(directives, ' '), block(values)], ' ');
}),
EnumValueDefinition: addDescription(function (_ref29) {
var name = _ref29.name,
directives = _ref29.directives;
return join([name, join(directives, ' ')], ' ');
}),
InputObjectTypeDefinition: addDescription(function (_ref30) {
var name = _ref30.name,
directives = _ref30.directives,
fields = _ref30.fields;
return join(['input', name, join(directives, ' '), block(fields)], ' ');
}),
DirectiveDefinition: addDescription(function (_ref31) {
var name = _ref31.name,
args = _ref31.arguments,
repeatable = _ref31.repeatable,
locations = _ref31.locations;
return 'directive @' + name + (hasMultilineItems(args) ? wrap('(\n', indent(join(args, '\n')), '\n)') : wrap('(', join(args, ', '), ')')) + (repeatable ? ' repeatable' : '') + ' on ' + join(locations, ' | ');
}),
SchemaExtension: function SchemaExtension(_ref32) {
var directives = _ref32.directives,
operationTypes = _ref32.operationTypes;
return join(['extend schema', join(directives, ' '), block(operationTypes)], ' ');
},
ScalarTypeExtension: function ScalarTypeExtension(_ref33) {
var name = _ref33.name,
directives = _ref33.directives;
return join(['extend scalar', name, join(directives, ' ')], ' ');
},
ObjectTypeExtension: function ObjectTypeExtension(_ref34) {
var name = _ref34.name,
interfaces = _ref34.interfaces,
directives = _ref34.directives,
fields = _ref34.fields;
return join(['extend type', name, wrap('implements ', join(interfaces, ' & ')), join(directives, ' '), block(fields)], ' ');
},
InterfaceTypeExtension: function InterfaceTypeExtension(_ref35) {
var name = _ref35.name,
interfaces = _ref35.interfaces,
directives = _ref35.directives,
fields = _ref35.fields;
return join(['extend interface', name, wrap('implements ', join(interfaces, ' & ')), join(directives, ' '), block(fields)], ' ');
},
UnionTypeExtension: function UnionTypeExtension(_ref36) {
var name = _ref36.name,
directives = _ref36.directives,
types = _ref36.types;
return join(['extend union', name, join(directives, ' '), types && types.length !== 0 ? '= ' + join(types, ' | ') : ''], ' ');
},
EnumTypeExtension: function EnumTypeExtension(_ref37) {
var name = _ref37.name,
directives = _ref37.directives,
values = _ref37.values;
return join(['extend enum', name, join(directives, ' '), block(values)], ' ');
},
InputObjectTypeExtension: function InputObjectTypeExtension(_ref38) {
var name = _ref38.name,
directives = _ref38.directives,
fields = _ref38.fields;
return join(['extend input', name, join(directives, ' '), block(fields)], ' ');
}
};
function addDescription(cb) {
return function (node) {
return join([node.description, cb(node)], '\n');
};
}
/**
 * Given maybeArray, print an empty string if it is null or empty; otherwise
 * print all items together, separated by separator if provided.
*/
function join(maybeArray) {
var _maybeArray$filter$jo;
var separator = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : '';
return (_maybeArray$filter$jo = maybeArray === null || maybeArray === void 0 ? void 0 : maybeArray.filter(function (x) {
return x;
}).join(separator)) !== null && _maybeArray$filter$jo !== void 0 ? _maybeArray$filter$jo : '';
}
/**
* Given array, print each item on its own line, wrapped in an
* indented "{ }" block.
*/
function block(array) {
return wrap('{\n', indent(join(array, '\n')), '\n}');
}
/**
 * If maybeString is not null or empty, then wrap with start and end; otherwise print an empty string.
*/
function wrap(start, maybeString) {
var end = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : '';
return maybeString != null && maybeString !== '' ? start + maybeString + end : '';
}
function indent(str) {
return wrap(' ', str.replace(/\n/g, '\n '));
}
function isMultiline(str) {
return str.indexOf('\n') !== -1;
}
function hasMultilineItems(maybeArray) {
return maybeArray != null && maybeArray.some(isMultiline);
}
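// Illustrative sketch (added for this write-up, not part of the original file):
// how the string helpers above compose. join() drops falsy items, wrap() skips
// empty content entirely, and block() indents each line by two spaces.
console.log(join(['a', '', 'b'], ', ')); // 'a, b'
console.log(wrap('(', '', ')'));         // ''
console.log(block(['id', 'name']));      // '{\n  id\n  name\n}'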

View File

@@ -0,0 +1,292 @@
// @flow strict
import type { ASTNode } from './ast';
import { visit } from './visitor';
import { printBlockString } from './blockString';
/**
* Converts an AST into a string, using one set of reasonable
* formatting rules.
*/
export function print(ast: ASTNode): string {
return visit(ast, { leave: printDocASTReducer });
}
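// Illustrative usage sketch (added for this write-up, not part of the original
// module). It assumes a `parse` function from the sibling parser module in this
// package; print() then re-serializes the parsed document in normalized form.
// import { parse } from './parser';
// const printed = print(parse('query  Foo {  user( id: 4 ) { name } }'));
// printed === 'query Foo {\n  user(id: 4) {\n    name\n  }\n}\n'; // true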
const MAX_LINE_LENGTH = 80;
// TODO: provide better type coverage in future
const printDocASTReducer: any = {
Name: (node) => node.value,
Variable: (node) => '$' + node.name,
// Document
Document: (node) => join(node.definitions, '\n\n') + '\n',
OperationDefinition(node) {
const op = node.operation;
const name = node.name;
const varDefs = wrap('(', join(node.variableDefinitions, ', '), ')');
const directives = join(node.directives, ' ');
const selectionSet = node.selectionSet;
// Anonymous queries with no directives or variable definitions can use
// the query short form.
return !name && !directives && !varDefs && op === 'query'
? selectionSet
: join([op, join([name, varDefs]), directives, selectionSet], ' ');
},
VariableDefinition: ({ variable, type, defaultValue, directives }) =>
variable +
': ' +
type +
wrap(' = ', defaultValue) +
wrap(' ', join(directives, ' ')),
SelectionSet: ({ selections }) => block(selections),
Field: ({ alias, name, arguments: args, directives, selectionSet }) => {
const prefix = wrap('', alias, ': ') + name;
let argsLine = prefix + wrap('(', join(args, ', '), ')');
if (argsLine.length > MAX_LINE_LENGTH) {
argsLine = prefix + wrap('(\n', indent(join(args, '\n')), '\n)');
}
return join([argsLine, join(directives, ' '), selectionSet], ' ');
},
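  // Illustrative note (added for this write-up, not part of the original file):
  // when the single-line form of a field's arguments would exceed
  // MAX_LINE_LENGTH (80 characters), the Field printer above falls back to one
  // argument per line:
  //   someField(
  //     aVeryLongArgumentName: "aVeryLongStringValue"
  //     anotherVeryLongArgumentName: "anotherVeryLongStringValue"
  //   ) { id }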
Argument: ({ name, value }) => name + ': ' + value,
// Fragments
FragmentSpread: ({ name, directives }) =>
'...' + name + wrap(' ', join(directives, ' ')),
InlineFragment: ({ typeCondition, directives, selectionSet }) =>
join(
['...', wrap('on ', typeCondition), join(directives, ' '), selectionSet],
' ',
),
FragmentDefinition: ({
name,
typeCondition,
variableDefinitions,
directives,
selectionSet,
}) =>
// Note: fragment variable definitions are experimental and may be changed
// or removed in the future.
`fragment ${name}${wrap('(', join(variableDefinitions, ', '), ')')} ` +
`on ${typeCondition} ${wrap('', join(directives, ' '), ' ')}` +
selectionSet,
// Value
IntValue: ({ value }) => value,
FloatValue: ({ value }) => value,
StringValue: ({ value, block: isBlockString }, key) =>
isBlockString
? printBlockString(value, key === 'description' ? '' : ' ')
: JSON.stringify(value),
BooleanValue: ({ value }) => (value ? 'true' : 'false'),
NullValue: () => 'null',
EnumValue: ({ value }) => value,
ListValue: ({ values }) => '[' + join(values, ', ') + ']',
ObjectValue: ({ fields }) => '{' + join(fields, ', ') + '}',
ObjectField: ({ name, value }) => name + ': ' + value,
// Directive
Directive: ({ name, arguments: args }) =>
'@' + name + wrap('(', join(args, ', '), ')'),
// Type
NamedType: ({ name }) => name,
ListType: ({ type }) => '[' + type + ']',
NonNullType: ({ type }) => type + '!',
// Type System Definitions
SchemaDefinition: addDescription(({ directives, operationTypes }) =>
join(['schema', join(directives, ' '), block(operationTypes)], ' '),
),
OperationTypeDefinition: ({ operation, type }) => operation + ': ' + type,
ScalarTypeDefinition: addDescription(({ name, directives }) =>
join(['scalar', name, join(directives, ' ')], ' '),
),
ObjectTypeDefinition: addDescription(
({ name, interfaces, directives, fields }) =>
join(
[
'type',
name,
wrap('implements ', join(interfaces, ' & ')),
join(directives, ' '),
block(fields),
],
' ',
),
),
FieldDefinition: addDescription(
({ name, arguments: args, type, directives }) =>
name +
(hasMultilineItems(args)
? wrap('(\n', indent(join(args, '\n')), '\n)')
: wrap('(', join(args, ', '), ')')) +
': ' +
type +
wrap(' ', join(directives, ' ')),
),
InputValueDefinition: addDescription(
({ name, type, defaultValue, directives }) =>
join(
[name + ': ' + type, wrap('= ', defaultValue), join(directives, ' ')],
' ',
),
),
InterfaceTypeDefinition: addDescription(
({ name, interfaces, directives, fields }) =>
join(
[
'interface',
name,
wrap('implements ', join(interfaces, ' & ')),
join(directives, ' '),
block(fields),
],
' ',
),
),
UnionTypeDefinition: addDescription(({ name, directives, types }) =>
join(
[
'union',
name,
join(directives, ' '),
types && types.length !== 0 ? '= ' + join(types, ' | ') : '',
],
' ',
),
),
EnumTypeDefinition: addDescription(({ name, directives, values }) =>
join(['enum', name, join(directives, ' '), block(values)], ' '),
),
EnumValueDefinition: addDescription(({ name, directives }) =>
join([name, join(directives, ' ')], ' '),
),
InputObjectTypeDefinition: addDescription(({ name, directives, fields }) =>
join(['input', name, join(directives, ' '), block(fields)], ' '),
),
DirectiveDefinition: addDescription(
({ name, arguments: args, repeatable, locations }) =>
'directive @' +
name +
(hasMultilineItems(args)
? wrap('(\n', indent(join(args, '\n')), '\n)')
: wrap('(', join(args, ', '), ')')) +
(repeatable ? ' repeatable' : '') +
' on ' +
join(locations, ' | '),
),
SchemaExtension: ({ directives, operationTypes }) =>
join(['extend schema', join(directives, ' '), block(operationTypes)], ' '),
ScalarTypeExtension: ({ name, directives }) =>
join(['extend scalar', name, join(directives, ' ')], ' '),
ObjectTypeExtension: ({ name, interfaces, directives, fields }) =>
join(
[
'extend type',
name,
wrap('implements ', join(interfaces, ' & ')),
join(directives, ' '),
block(fields),
],
' ',
),
InterfaceTypeExtension: ({ name, interfaces, directives, fields }) =>
join(
[
'extend interface',
name,
wrap('implements ', join(interfaces, ' & ')),
join(directives, ' '),
block(fields),
],
' ',
),
UnionTypeExtension: ({ name, directives, types }) =>
join(
[
'extend union',
name,
join(directives, ' '),
types && types.length !== 0 ? '= ' + join(types, ' | ') : '',
],
' ',
),
EnumTypeExtension: ({ name, directives, values }) =>
join(['extend enum', name, join(directives, ' '), block(values)], ' '),
InputObjectTypeExtension: ({ name, directives, fields }) =>
join(['extend input', name, join(directives, ' '), block(fields)], ' '),
};
function addDescription(cb) {
return (node) => join([node.description, cb(node)], '\n');
}
/**
 * Given maybeArray, print an empty string if it is null or empty; otherwise
 * print all items together, separated by separator if provided.
*/
function join(maybeArray: ?Array<string>, separator = ''): string {
return maybeArray?.filter((x) => x).join(separator) ?? '';
}
/**
* Given array, print each item on its own line, wrapped in an
* indented "{ }" block.
*/
function block(array: ?Array<string>): string {
return wrap('{\n', indent(join(array, '\n')), '\n}');
}
/**
 * If maybeString is not null or empty, then wrap with start and end; otherwise print an empty string.
*/
function wrap(start: string, maybeString: ?string, end: string = ''): string {
return maybeString != null && maybeString !== ''
? start + maybeString + end
: '';
}
function indent(str: string): string {
return wrap(' ', str.replace(/\n/g, '\n '));
}
function isMultiline(str: string): boolean {
return str.indexOf('\n') !== -1;
}
function hasMultilineItems(maybeArray: ?Array<string>): boolean {
return maybeArray != null && maybeArray.some(isMultiline);
}

View File

@@ -0,0 +1,313 @@
import { visit } from "./visitor.mjs";
import { printBlockString } from "./blockString.mjs";
/**
* Converts an AST into a string, using one set of reasonable
* formatting rules.
*/
export function print(ast) {
return visit(ast, {
leave: printDocASTReducer
});
}
var MAX_LINE_LENGTH = 80; // TODO: provide better type coverage in future
var printDocASTReducer = {
Name: function Name(node) {
return node.value;
},
Variable: function Variable(node) {
return '$' + node.name;
},
// Document
Document: function Document(node) {
return join(node.definitions, '\n\n') + '\n';
},
OperationDefinition: function OperationDefinition(node) {
var op = node.operation;
var name = node.name;
var varDefs = wrap('(', join(node.variableDefinitions, ', '), ')');
var directives = join(node.directives, ' ');
var selectionSet = node.selectionSet; // Anonymous queries with no directives or variable definitions can use
// the query short form.
return !name && !directives && !varDefs && op === 'query' ? selectionSet : join([op, join([name, varDefs]), directives, selectionSet], ' ');
},
VariableDefinition: function VariableDefinition(_ref) {
var variable = _ref.variable,
type = _ref.type,
defaultValue = _ref.defaultValue,
directives = _ref.directives;
return variable + ': ' + type + wrap(' = ', defaultValue) + wrap(' ', join(directives, ' '));
},
SelectionSet: function SelectionSet(_ref2) {
var selections = _ref2.selections;
return block(selections);
},
Field: function Field(_ref3) {
var alias = _ref3.alias,
name = _ref3.name,
args = _ref3.arguments,
directives = _ref3.directives,
selectionSet = _ref3.selectionSet;
var prefix = wrap('', alias, ': ') + name;
var argsLine = prefix + wrap('(', join(args, ', '), ')');
if (argsLine.length > MAX_LINE_LENGTH) {
argsLine = prefix + wrap('(\n', indent(join(args, '\n')), '\n)');
}
return join([argsLine, join(directives, ' '), selectionSet], ' ');
},
Argument: function Argument(_ref4) {
var name = _ref4.name,
value = _ref4.value;
return name + ': ' + value;
},
// Fragments
FragmentSpread: function FragmentSpread(_ref5) {
var name = _ref5.name,
directives = _ref5.directives;
return '...' + name + wrap(' ', join(directives, ' '));
},
InlineFragment: function InlineFragment(_ref6) {
var typeCondition = _ref6.typeCondition,
directives = _ref6.directives,
selectionSet = _ref6.selectionSet;
return join(['...', wrap('on ', typeCondition), join(directives, ' '), selectionSet], ' ');
},
FragmentDefinition: function FragmentDefinition(_ref7) {
var name = _ref7.name,
typeCondition = _ref7.typeCondition,
variableDefinitions = _ref7.variableDefinitions,
directives = _ref7.directives,
selectionSet = _ref7.selectionSet;
return (// Note: fragment variable definitions are experimental and may be changed
// or removed in the future.
"fragment ".concat(name).concat(wrap('(', join(variableDefinitions, ', '), ')'), " ") + "on ".concat(typeCondition, " ").concat(wrap('', join(directives, ' '), ' ')) + selectionSet
);
},
// Value
IntValue: function IntValue(_ref8) {
var value = _ref8.value;
return value;
},
FloatValue: function FloatValue(_ref9) {
var value = _ref9.value;
return value;
},
StringValue: function StringValue(_ref10, key) {
var value = _ref10.value,
isBlockString = _ref10.block;
return isBlockString ? printBlockString(value, key === 'description' ? '' : ' ') : JSON.stringify(value);
},
BooleanValue: function BooleanValue(_ref11) {
var value = _ref11.value;
return value ? 'true' : 'false';
},
NullValue: function NullValue() {
return 'null';
},
EnumValue: function EnumValue(_ref12) {
var value = _ref12.value;
return value;
},
ListValue: function ListValue(_ref13) {
var values = _ref13.values;
return '[' + join(values, ', ') + ']';
},
ObjectValue: function ObjectValue(_ref14) {
var fields = _ref14.fields;
return '{' + join(fields, ', ') + '}';
},
ObjectField: function ObjectField(_ref15) {
var name = _ref15.name,
value = _ref15.value;
return name + ': ' + value;
},
// Directive
Directive: function Directive(_ref16) {
var name = _ref16.name,
args = _ref16.arguments;
return '@' + name + wrap('(', join(args, ', '), ')');
},
// Type
NamedType: function NamedType(_ref17) {
var name = _ref17.name;
return name;
},
ListType: function ListType(_ref18) {
var type = _ref18.type;
return '[' + type + ']';
},
NonNullType: function NonNullType(_ref19) {
var type = _ref19.type;
return type + '!';
},
// Type System Definitions
SchemaDefinition: addDescription(function (_ref20) {
var directives = _ref20.directives,
operationTypes = _ref20.operationTypes;
return join(['schema', join(directives, ' '), block(operationTypes)], ' ');
}),
OperationTypeDefinition: function OperationTypeDefinition(_ref21) {
var operation = _ref21.operation,
type = _ref21.type;
return operation + ': ' + type;
},
ScalarTypeDefinition: addDescription(function (_ref22) {
var name = _ref22.name,
directives = _ref22.directives;
return join(['scalar', name, join(directives, ' ')], ' ');
}),
ObjectTypeDefinition: addDescription(function (_ref23) {
var name = _ref23.name,
interfaces = _ref23.interfaces,
directives = _ref23.directives,
fields = _ref23.fields;
return join(['type', name, wrap('implements ', join(interfaces, ' & ')), join(directives, ' '), block(fields)], ' ');
}),
FieldDefinition: addDescription(function (_ref24) {
var name = _ref24.name,
args = _ref24.arguments,
type = _ref24.type,
directives = _ref24.directives;
return name + (hasMultilineItems(args) ? wrap('(\n', indent(join(args, '\n')), '\n)') : wrap('(', join(args, ', '), ')')) + ': ' + type + wrap(' ', join(directives, ' '));
}),
InputValueDefinition: addDescription(function (_ref25) {
var name = _ref25.name,
type = _ref25.type,
defaultValue = _ref25.defaultValue,
directives = _ref25.directives;
return join([name + ': ' + type, wrap('= ', defaultValue), join(directives, ' ')], ' ');
}),
InterfaceTypeDefinition: addDescription(function (_ref26) {
var name = _ref26.name,
interfaces = _ref26.interfaces,
directives = _ref26.directives,
fields = _ref26.fields;
return join(['interface', name, wrap('implements ', join(interfaces, ' & ')), join(directives, ' '), block(fields)], ' ');
}),
UnionTypeDefinition: addDescription(function (_ref27) {
var name = _ref27.name,
directives = _ref27.directives,
types = _ref27.types;
return join(['union', name, join(directives, ' '), types && types.length !== 0 ? '= ' + join(types, ' | ') : ''], ' ');
}),
EnumTypeDefinition: addDescription(function (_ref28) {
var name = _ref28.name,
directives = _ref28.directives,
values = _ref28.values;
return join(['enum', name, join(directives, ' '), block(values)], ' ');
}),
EnumValueDefinition: addDescription(function (_ref29) {
var name = _ref29.name,
directives = _ref29.directives;
return join([name, join(directives, ' ')], ' ');
}),
InputObjectTypeDefinition: addDescription(function (_ref30) {
var name = _ref30.name,
directives = _ref30.directives,
fields = _ref30.fields;
return join(['input', name, join(directives, ' '), block(fields)], ' ');
}),
DirectiveDefinition: addDescription(function (_ref31) {
var name = _ref31.name,
args = _ref31.arguments,
repeatable = _ref31.repeatable,
locations = _ref31.locations;
return 'directive @' + name + (hasMultilineItems(args) ? wrap('(\n', indent(join(args, '\n')), '\n)') : wrap('(', join(args, ', '), ')')) + (repeatable ? ' repeatable' : '') + ' on ' + join(locations, ' | ');
}),
SchemaExtension: function SchemaExtension(_ref32) {
var directives = _ref32.directives,
operationTypes = _ref32.operationTypes;
return join(['extend schema', join(directives, ' '), block(operationTypes)], ' ');
},
ScalarTypeExtension: function ScalarTypeExtension(_ref33) {
var name = _ref33.name,
directives = _ref33.directives;
return join(['extend scalar', name, join(directives, ' ')], ' ');
},
ObjectTypeExtension: function ObjectTypeExtension(_ref34) {
var name = _ref34.name,
interfaces = _ref34.interfaces,
directives = _ref34.directives,
fields = _ref34.fields;
return join(['extend type', name, wrap('implements ', join(interfaces, ' & ')), join(directives, ' '), block(fields)], ' ');
},
InterfaceTypeExtension: function InterfaceTypeExtension(_ref35) {
var name = _ref35.name,
interfaces = _ref35.interfaces,
directives = _ref35.directives,
fields = _ref35.fields;
return join(['extend interface', name, wrap('implements ', join(interfaces, ' & ')), join(directives, ' '), block(fields)], ' ');
},
UnionTypeExtension: function UnionTypeExtension(_ref36) {
var name = _ref36.name,
directives = _ref36.directives,
types = _ref36.types;
return join(['extend union', name, join(directives, ' '), types && types.length !== 0 ? '= ' + join(types, ' | ') : ''], ' ');
},
EnumTypeExtension: function EnumTypeExtension(_ref37) {
var name = _ref37.name,
directives = _ref37.directives,
values = _ref37.values;
return join(['extend enum', name, join(directives, ' '), block(values)], ' ');
},
InputObjectTypeExtension: function InputObjectTypeExtension(_ref38) {
var name = _ref38.name,
directives = _ref38.directives,
fields = _ref38.fields;
return join(['extend input', name, join(directives, ' '), block(fields)], ' ');
}
};
function addDescription(cb) {
return function (node) {
return join([node.description, cb(node)], '\n');
};
}
/**
 * Given maybeArray, print an empty string if it is null or empty; otherwise
 * print all items together, separated by separator if provided.
*/
function join(maybeArray) {
var _maybeArray$filter$jo;
var separator = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : '';
return (_maybeArray$filter$jo = maybeArray === null || maybeArray === void 0 ? void 0 : maybeArray.filter(function (x) {
return x;
}).join(separator)) !== null && _maybeArray$filter$jo !== void 0 ? _maybeArray$filter$jo : '';
}
/**
* Given array, print each item on its own line, wrapped in an
* indented "{ }" block.
*/
function block(array) {
return wrap('{\n', indent(join(array, '\n')), '\n}');
}
/**
 * If maybeString is not null or empty, then wrap with start and end; otherwise print an empty string.
*/
function wrap(start, maybeString) {
var end = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : '';
return maybeString != null && maybeString !== '' ? start + maybeString + end : '';
}
function indent(str) {
return wrap(' ', str.replace(/\n/g, '\n '));
}
function isMultiline(str) {
return str.indexOf('\n') !== -1;
}
function hasMultilineItems(maybeArray) {
return maybeArray != null && maybeArray.some(isMultiline);
}
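// Illustrative sketch (added for this write-up, not part of the original file):
// addDescription() prepends a node's already-printed description, when present,
// on its own line above the printed definition.
var printScalarSketch = addDescription(function (node) {
  return 'scalar ' + node.name;
});
console.log(printScalarSketch({ description: '"""An opaque id."""', name: 'ID' }));
// -> '"""An opaque id."""\nscalar ID'
console.log(printScalarSketch({ name: 'ID' })); // -> 'scalar ID'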

View File

@@ -0,0 +1,25 @@
interface Location {
line: number;
column: number;
}
/**
* A representation of source input to GraphQL. The `name` and `locationOffset` parameters are
* optional, but they are useful for clients who store GraphQL documents in source files.
* For example, if the GraphQL input starts at line 40 in a file named `Foo.graphql`, it might
* be useful for `name` to be `"Foo.graphql"` and location to be `{ line: 40, column: 1 }`.
* The `line` and `column` properties in `locationOffset` are 1-indexed.
*/
export class Source {
body: string;
name: string;
locationOffset: Location;
constructor(body: string, name?: string, locationOffset?: Location);
}
/**
* Test if the given value is a Source object.
*
* @internal
*/
export function isSource(source: any): source is Source;

View File

@@ -0,0 +1,67 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.isSource = isSource;
exports.Source = void 0;
var _symbols = require("../polyfills/symbols.js");
var _inspect = _interopRequireDefault(require("../jsutils/inspect.js"));
var _devAssert = _interopRequireDefault(require("../jsutils/devAssert.js"));
var _instanceOf = _interopRequireDefault(require("../jsutils/instanceOf.js"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
/**
* A representation of source input to GraphQL. The `name` and `locationOffset` parameters are
* optional, but they are useful for clients who store GraphQL documents in source files.
* For example, if the GraphQL input starts at line 40 in a file named `Foo.graphql`, it might
* be useful for `name` to be `"Foo.graphql"` and location to be `{ line: 40, column: 1 }`.
* The `line` and `column` properties in `locationOffset` are 1-indexed.
*/
var Source = /*#__PURE__*/function () {
function Source(body) {
var name = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'GraphQL request';
var locationOffset = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {
line: 1,
column: 1
};
typeof body === 'string' || (0, _devAssert.default)(0, "Body must be a string. Received: ".concat((0, _inspect.default)(body), "."));
this.body = body;
this.name = name;
this.locationOffset = locationOffset;
this.locationOffset.line > 0 || (0, _devAssert.default)(0, 'line in locationOffset is 1-indexed and must be positive.');
this.locationOffset.column > 0 || (0, _devAssert.default)(0, 'column in locationOffset is 1-indexed and must be positive.');
} // $FlowFixMe[unsupported-syntax] Flow doesn't support computed properties yet
_createClass(Source, [{
key: _symbols.SYMBOL_TO_STRING_TAG,
get: function get() {
return 'Source';
}
}]);
return Source;
}();
/**
* Test if the given value is a Source object.
*
* @internal
*/
exports.Source = Source;
// eslint-disable-next-line no-redeclare
function isSource(source) {
return (0, _instanceOf.default)(source, Source);
}

View File

@@ -0,0 +1,64 @@
// @flow strict
import { SYMBOL_TO_STRING_TAG } from '../polyfills/symbols';
import inspect from '../jsutils/inspect';
import devAssert from '../jsutils/devAssert';
import instanceOf from '../jsutils/instanceOf';
type Location = {|
line: number,
column: number,
|};
/**
* A representation of source input to GraphQL. The `name` and `locationOffset` parameters are
* optional, but they are useful for clients who store GraphQL documents in source files.
* For example, if the GraphQL input starts at line 40 in a file named `Foo.graphql`, it might
* be useful for `name` to be `"Foo.graphql"` and location to be `{ line: 40, column: 1 }`.
* The `line` and `column` properties in `locationOffset` are 1-indexed.
*/
export class Source {
body: string;
name: string;
locationOffset: Location;
constructor(
body: string,
name: string = 'GraphQL request',
locationOffset: Location = { line: 1, column: 1 },
) {
devAssert(
typeof body === 'string',
`Body must be a string. Received: ${inspect(body)}.`,
);
this.body = body;
this.name = name;
this.locationOffset = locationOffset;
devAssert(
this.locationOffset.line > 0,
'line in locationOffset is 1-indexed and must be positive.',
);
devAssert(
this.locationOffset.column > 0,
'column in locationOffset is 1-indexed and must be positive.',
);
}
// $FlowFixMe[unsupported-syntax] Flow doesn't support computed properties yet
get [SYMBOL_TO_STRING_TAG]() {
return 'Source';
}
}
/**
* Test if the given value is a Source object.
*
* @internal
*/
declare function isSource(source: mixed): boolean %checks(source instanceof
Source);
// eslint-disable-next-line no-redeclare
export function isSource(source) {
return instanceOf(source, Source);
}
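// Illustrative usage sketch (added for this write-up, not part of the original
// file): a Source names the document and records a 1-indexed offset, so that a
// document embedded in a host file (here starting at line 40 of Foo.graphql)
// reports positions relative to that file.
const embeddedSource = new Source('{ user { name } }', 'Foo.graphql', {
  line: 40,
  column: 1,
});
console.log(embeddedSource.name); // 'Foo.graphql'
console.log(isSource(embeddedSource)); // true
console.log(isSource('{ user { name } }')); // false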

View File

@@ -0,0 +1,51 @@
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
import { SYMBOL_TO_STRING_TAG } from "../polyfills/symbols.mjs";
import inspect from "../jsutils/inspect.mjs";
import devAssert from "../jsutils/devAssert.mjs";
import instanceOf from "../jsutils/instanceOf.mjs";
/**
* A representation of source input to GraphQL. The `name` and `locationOffset` parameters are
* optional, but they are useful for clients who store GraphQL documents in source files.
* For example, if the GraphQL input starts at line 40 in a file named `Foo.graphql`, it might
* be useful for `name` to be `"Foo.graphql"` and location to be `{ line: 40, column: 1 }`.
* The `line` and `column` properties in `locationOffset` are 1-indexed.
*/
export var Source = /*#__PURE__*/function () {
function Source(body) {
var name = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'GraphQL request';
var locationOffset = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {
line: 1,
column: 1
};
typeof body === 'string' || devAssert(0, "Body must be a string. Received: ".concat(inspect(body), "."));
this.body = body;
this.name = name;
this.locationOffset = locationOffset;
this.locationOffset.line > 0 || devAssert(0, 'line in locationOffset is 1-indexed and must be positive.');
this.locationOffset.column > 0 || devAssert(0, 'column in locationOffset is 1-indexed and must be positive.');
} // $FlowFixMe[unsupported-syntax] Flow doesn't support computed properties yet
_createClass(Source, [{
key: SYMBOL_TO_STRING_TAG,
get: function get() {
return 'Source';
}
}]);
return Source;
}();
/**
* Test if the given value is a Source object.
*
* @internal
*/
// eslint-disable-next-line no-redeclare
export function isSource(source) {
return instanceOf(source, Source);
}

View File

@@ -0,0 +1,33 @@
/**
* An exported enum describing the different kinds of tokens that the
* lexer emits.
*/
export const TokenKind: {
SOF: '<SOF>';
EOF: '<EOF>';
BANG: '!';
DOLLAR: '$';
AMP: '&';
PAREN_L: '(';
PAREN_R: ')';
SPREAD: '...';
COLON: ':';
EQUALS: '=';
AT: '@';
BRACKET_L: '[';
BRACKET_R: ']';
BRACE_L: '{';
PIPE: '|';
BRACE_R: '}';
NAME: 'Name';
INT: 'Int';
FLOAT: 'Float';
STRING: 'String';
BLOCK_STRING: 'BlockString';
COMMENT: 'Comment';
};
/**
 * The enum type representing the token kind values.
*/
export type TokenKindEnum = typeof TokenKind[keyof typeof TokenKind];

View File

@@ -0,0 +1,40 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.TokenKind = void 0;
/**
* An exported enum describing the different kinds of tokens that the
* lexer emits.
*/
var TokenKind = Object.freeze({
SOF: '<SOF>',
EOF: '<EOF>',
BANG: '!',
DOLLAR: '$',
AMP: '&',
PAREN_L: '(',
PAREN_R: ')',
SPREAD: '...',
COLON: ':',
EQUALS: '=',
AT: '@',
BRACKET_L: '[',
BRACKET_R: ']',
BRACE_L: '{',
PIPE: '|',
BRACE_R: '}',
NAME: 'Name',
INT: 'Int',
FLOAT: 'Float',
STRING: 'String',
BLOCK_STRING: 'BlockString',
COMMENT: 'Comment'
});
/**
 * The enum type representing the token kind values.
*/
exports.TokenKind = TokenKind;

View File

@@ -0,0 +1,34 @@
// @flow strict
/**
* An exported enum describing the different kinds of tokens that the
* lexer emits.
*/
export const TokenKind = Object.freeze({
SOF: '<SOF>',
EOF: '<EOF>',
BANG: '!',
DOLLAR: '$',
AMP: '&',
PAREN_L: '(',
PAREN_R: ')',
SPREAD: '...',
COLON: ':',
EQUALS: '=',
AT: '@',
BRACKET_L: '[',
BRACKET_R: ']',
BRACE_L: '{',
PIPE: '|',
BRACE_R: '}',
NAME: 'Name',
INT: 'Int',
FLOAT: 'Float',
STRING: 'String',
BLOCK_STRING: 'BlockString',
COMMENT: 'Comment',
});
/**
 * The enum type representing the token kind values.
*/
export type TokenKindEnum = $Values<typeof TokenKind>;
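// Illustrative sketch (added for this write-up, not part of the original file):
// each TokenKind value is the literal punctuator or token-name string that the
// lexer attaches to the Token nodes it emits.
console.log(TokenKind.BANG); // '!'
console.log(TokenKind.SPREAD); // '...'
console.log(TokenKind.BLOCK_STRING); // 'BlockString'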

View File

@@ -0,0 +1,31 @@
/**
* An exported enum describing the different kinds of tokens that the
* lexer emits.
*/
export var TokenKind = Object.freeze({
SOF: '<SOF>',
EOF: '<EOF>',
BANG: '!',
DOLLAR: '$',
AMP: '&',
PAREN_L: '(',
PAREN_R: ')',
SPREAD: '...',
COLON: ':',
EQUALS: '=',
AT: '@',
BRACKET_L: '[',
BRACKET_R: ']',
BRACE_L: '{',
PIPE: '|',
BRACE_R: '}',
NAME: 'Name',
INT: 'Int',
FLOAT: 'Float',
STRING: 'String',
BLOCK_STRING: 'BlockString',
COMMENT: 'Comment'
});
/**
 * The enum type representing the token kind values.
*/

View File

@@ -0,0 +1,272 @@
import { Maybe } from '../jsutils/Maybe';
import { ASTNode, ASTKindToNode } from './ast';
/**
 * A visitor is provided to visit; it contains the collection of
* relevant functions to be called during the visitor's traversal.
*/
export type ASTVisitor = Visitor<ASTKindToNode>;
export type Visitor<KindToNode, Nodes = KindToNode[keyof KindToNode]> =
| EnterLeaveVisitor<KindToNode, Nodes>
| ShapeMapVisitor<KindToNode, Nodes>;
interface EnterLeave<T> {
readonly enter?: T;
readonly leave?: T;
}
type EnterLeaveVisitor<KindToNode, Nodes> = EnterLeave<
VisitFn<Nodes> | { [K in keyof KindToNode]?: VisitFn<Nodes, KindToNode[K]> }
>;
type ShapeMapVisitor<KindToNode, Nodes> = {
[K in keyof KindToNode]?:
| VisitFn<Nodes, KindToNode[K]>
| EnterLeave<VisitFn<Nodes, KindToNode[K]>>;
};
/**
 * A visitor is composed of visit functions, which are called on each node
* during the visitor's traversal.
*/
export type VisitFn<TAnyNode, TVisitedNode = TAnyNode> = (
  /** The current node being visited. */
node: TVisitedNode,
/** The index or key to this node from the parent node or Array. */
key: string | number | undefined,
/** The parent immediately above this node, which may be an Array. */
parent: TAnyNode | ReadonlyArray<TAnyNode> | undefined,
/** The key path to get to this node from the root node. */
path: ReadonlyArray<string | number>,
/**
* All nodes and Arrays visited before reaching parent of this node.
* These correspond to array indices in `path`.
   * Note: ancestors includes arrays which contain the parent of the visited node.
*/
ancestors: ReadonlyArray<TAnyNode | ReadonlyArray<TAnyNode>>,
) => any;
/**
 * A KeyMap describes the traversable properties of each kind of node.
 *
 * @deprecated Please use ASTVisitorKeyMap instead
*/
export type VisitorKeyMap<T> = { [P in keyof T]: ReadonlyArray<keyof T[P]> };
/**
 * A KeyMap describes the traversable properties of each kind of node.
*/
export type ASTVisitorKeyMap = {
[P in keyof ASTKindToNode]?: ReadonlyArray<keyof ASTKindToNode[P]>;
};
// TODO: Should be `[]`, but that requires TypeScript@3
type EmptyTuple = Array<never>;
export const QueryDocumentKeys: {
Name: EmptyTuple;
Document: ['definitions'];
// Prettier forces trailing commas, but TS pre 3.2 doesn't allow them.
// prettier-ignore
OperationDefinition: [
'name',
'variableDefinitions',
'directives',
'selectionSet'
];
VariableDefinition: ['variable', 'type', 'defaultValue', 'directives'];
Variable: ['name'];
SelectionSet: ['selections'];
Field: ['alias', 'name', 'arguments', 'directives', 'selectionSet'];
Argument: ['name', 'value'];
FragmentSpread: ['name', 'directives'];
InlineFragment: ['typeCondition', 'directives', 'selectionSet'];
// prettier-ignore
FragmentDefinition: [
'name',
// Note: fragment variable definitions are experimental and may be changed
// or removed in the future.
'variableDefinitions',
'typeCondition',
'directives',
'selectionSet'
];
IntValue: EmptyTuple;
FloatValue: EmptyTuple;
StringValue: EmptyTuple;
BooleanValue: EmptyTuple;
NullValue: EmptyTuple;
EnumValue: EmptyTuple;
ListValue: ['values'];
ObjectValue: ['fields'];
ObjectField: ['name', 'value'];
Directive: ['name', 'arguments'];
NamedType: ['name'];
ListType: ['type'];
NonNullType: ['type'];
SchemaDefinition: ['description', 'directives', 'operationTypes'];
OperationTypeDefinition: ['type'];
ScalarTypeDefinition: ['description', 'name', 'directives'];
// prettier-ignore
ObjectTypeDefinition: [
'description',
'name',
'interfaces',
'directives',
'fields'
];
FieldDefinition: ['description', 'name', 'arguments', 'type', 'directives'];
// prettier-ignore
InputValueDefinition: [
'description',
'name',
'type',
'defaultValue',
'directives'
];
// prettier-ignore
InterfaceTypeDefinition: [
'description',
'name',
'interfaces',
'directives',
'fields'
];
UnionTypeDefinition: ['description', 'name', 'directives', 'types'];
EnumTypeDefinition: ['description', 'name', 'directives', 'values'];
EnumValueDefinition: ['description', 'name', 'directives'];
InputObjectTypeDefinition: ['description', 'name', 'directives', 'fields'];
DirectiveDefinition: ['description', 'name', 'arguments', 'locations'];
SchemaExtension: ['directives', 'operationTypes'];
ScalarTypeExtension: ['name', 'directives'];
ObjectTypeExtension: ['name', 'interfaces', 'directives', 'fields'];
InterfaceTypeExtension: ['name', 'interfaces', 'directives', 'fields'];
UnionTypeExtension: ['name', 'directives', 'types'];
EnumTypeExtension: ['name', 'directives', 'values'];
InputObjectTypeExtension: ['name', 'directives', 'fields'];
};
export const BREAK: any;
/**
* visit() will walk through an AST using a depth-first traversal, calling
* the visitor's enter function at each node in the traversal, and calling the
* leave function after visiting that node and all of its child nodes.
*
* By returning different values from the enter and leave functions, the
* behavior of the visitor can be altered, including skipping over a sub-tree of
* the AST (by returning false), editing the AST by returning a value or null
* to remove the value, or to stop the whole traversal by returning BREAK.
*
* When using visit() to edit an AST, the original AST will not be modified, and
* a new version of the AST with the changes applied will be returned from the
* visit function.
*
* const editedAST = visit(ast, {
* enter(node, key, parent, path, ancestors) {
* // @return
* // undefined: no action
* // false: skip visiting this node
* // visitor.BREAK: stop visiting altogether
* // null: delete this node
* // any value: replace this node with the returned value
* },
* leave(node, key, parent, path, ancestors) {
* // @return
* // undefined: no action
* // false: no action
* // visitor.BREAK: stop visiting altogether
* // null: delete this node
* // any value: replace this node with the returned value
* }
* });
*
 * As an alternative to providing enter() and leave() functions, a visitor can
* instead provide functions named the same as the kinds of AST nodes, or
* enter/leave visitors at a named key, leading to four permutations of the
* visitor API:
*
* 1) Named visitors triggered when entering a node of a specific kind.
*
* visit(ast, {
* Kind(node) {
* // enter the "Kind" node
* }
* })
*
* 2) Named visitors that trigger upon entering and leaving a node of
* a specific kind.
*
* visit(ast, {
* Kind: {
* enter(node) {
* // enter the "Kind" node
* }
* leave(node) {
* // leave the "Kind" node
* }
* }
* })
*
* 3) Generic visitors that trigger upon entering and leaving any node.
*
* visit(ast, {
* enter(node) {
* // enter any node
* },
* leave(node) {
* // leave any node
* }
* })
*
* 4) Parallel visitors for entering and leaving nodes of a specific kind.
*
* visit(ast, {
* enter: {
* Kind(node) {
* // enter the "Kind" node
* }
* },
* leave: {
* Kind(node) {
* // leave the "Kind" node
* }
* }
* })
*/
export function visit(
root: ASTNode,
visitor: Visitor<ASTKindToNode>,
visitorKeys?: VisitorKeyMap<ASTKindToNode>, // default: QueryDocumentKeys
): any;
/**
* Creates a new visitor instance which delegates to many visitors to run in
 * parallel. Each visitor will be invoked for each node before moving on.
*
* If a prior visitor edits a node, no following visitors will see that node.
*/
export function visitInParallel(
visitors: ReadonlyArray<Visitor<ASTKindToNode>>,
): Visitor<ASTKindToNode>;
/**
* Given a visitor instance, if it is leaving or not, and a node kind, return
* the function the visitor runtime should call.
*/
export function getVisitFn(
visitor: Visitor<any>,
kind: string,
isLeaving: boolean,
): Maybe<VisitFn<any>>;

View File

@@ -0,0 +1,397 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.visit = visit;
exports.visitInParallel = visitInParallel;
exports.getVisitFn = getVisitFn;
exports.BREAK = exports.QueryDocumentKeys = void 0;
var _inspect = _interopRequireDefault(require("../jsutils/inspect.js"));
var _ast = require("./ast.js");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var QueryDocumentKeys = {
Name: [],
Document: ['definitions'],
OperationDefinition: ['name', 'variableDefinitions', 'directives', 'selectionSet'],
VariableDefinition: ['variable', 'type', 'defaultValue', 'directives'],
Variable: ['name'],
SelectionSet: ['selections'],
Field: ['alias', 'name', 'arguments', 'directives', 'selectionSet'],
Argument: ['name', 'value'],
FragmentSpread: ['name', 'directives'],
InlineFragment: ['typeCondition', 'directives', 'selectionSet'],
FragmentDefinition: ['name', // Note: fragment variable definitions are experimental and may be changed
// or removed in the future.
'variableDefinitions', 'typeCondition', 'directives', 'selectionSet'],
IntValue: [],
FloatValue: [],
StringValue: [],
BooleanValue: [],
NullValue: [],
EnumValue: [],
ListValue: ['values'],
ObjectValue: ['fields'],
ObjectField: ['name', 'value'],
Directive: ['name', 'arguments'],
NamedType: ['name'],
ListType: ['type'],
NonNullType: ['type'],
SchemaDefinition: ['description', 'directives', 'operationTypes'],
OperationTypeDefinition: ['type'],
ScalarTypeDefinition: ['description', 'name', 'directives'],
ObjectTypeDefinition: ['description', 'name', 'interfaces', 'directives', 'fields'],
FieldDefinition: ['description', 'name', 'arguments', 'type', 'directives'],
InputValueDefinition: ['description', 'name', 'type', 'defaultValue', 'directives'],
InterfaceTypeDefinition: ['description', 'name', 'interfaces', 'directives', 'fields'],
UnionTypeDefinition: ['description', 'name', 'directives', 'types'],
EnumTypeDefinition: ['description', 'name', 'directives', 'values'],
EnumValueDefinition: ['description', 'name', 'directives'],
InputObjectTypeDefinition: ['description', 'name', 'directives', 'fields'],
DirectiveDefinition: ['description', 'name', 'arguments', 'locations'],
SchemaExtension: ['directives', 'operationTypes'],
ScalarTypeExtension: ['name', 'directives'],
ObjectTypeExtension: ['name', 'interfaces', 'directives', 'fields'],
InterfaceTypeExtension: ['name', 'interfaces', 'directives', 'fields'],
UnionTypeExtension: ['name', 'directives', 'types'],
EnumTypeExtension: ['name', 'directives', 'values'],
InputObjectTypeExtension: ['name', 'directives', 'fields']
};
exports.QueryDocumentKeys = QueryDocumentKeys;
var BREAK = Object.freeze({});
/**
* visit() will walk through an AST using a depth-first traversal, calling
* the visitor's enter function at each node in the traversal, and calling the
* leave function after visiting that node and all of its child nodes.
*
* By returning different values from the enter and leave functions, the
* behavior of the visitor can be altered, including skipping over a sub-tree of
* the AST (by returning false), editing the AST by returning a value or null
* to remove the value, or to stop the whole traversal by returning BREAK.
*
* When using visit() to edit an AST, the original AST will not be modified, and
* a new version of the AST with the changes applied will be returned from the
* visit function.
*
* const editedAST = visit(ast, {
* enter(node, key, parent, path, ancestors) {
* // @return
* // undefined: no action
* // false: skip visiting this node
* // visitor.BREAK: stop visiting altogether
* // null: delete this node
* // any value: replace this node with the returned value
* },
* leave(node, key, parent, path, ancestors) {
* // @return
* // undefined: no action
* // false: no action
* // visitor.BREAK: stop visiting altogether
* // null: delete this node
* // any value: replace this node with the returned value
* }
* });
*
 * As an alternative to providing enter() and leave() functions, a visitor can
* instead provide functions named the same as the kinds of AST nodes, or
* enter/leave visitors at a named key, leading to four permutations of the
* visitor API:
*
* 1) Named visitors triggered when entering a node of a specific kind.
*
* visit(ast, {
* Kind(node) {
* // enter the "Kind" node
* }
* })
*
* 2) Named visitors that trigger upon entering and leaving a node of
* a specific kind.
*
* visit(ast, {
* Kind: {
* enter(node) {
* // enter the "Kind" node
* }
* leave(node) {
* // leave the "Kind" node
* }
* }
* })
*
* 3) Generic visitors that trigger upon entering and leaving any node.
*
* visit(ast, {
* enter(node) {
* // enter any node
* },
* leave(node) {
* // leave any node
* }
* })
*
* 4) Parallel visitors for entering and leaving nodes of a specific kind.
*
* visit(ast, {
* enter: {
* Kind(node) {
* // enter the "Kind" node
* }
* },
* leave: {
* Kind(node) {
* // leave the "Kind" node
* }
* }
* })
*/
exports.BREAK = BREAK;
function visit(root, visitor) {
var visitorKeys = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : QueryDocumentKeys;
/* eslint-disable no-undef-init */
var stack = undefined;
var inArray = Array.isArray(root);
var keys = [root];
var index = -1;
var edits = [];
var node = undefined;
var key = undefined;
var parent = undefined;
var path = [];
var ancestors = [];
var newRoot = root;
/* eslint-enable no-undef-init */
do {
index++;
var isLeaving = index === keys.length;
var isEdited = isLeaving && edits.length !== 0;
if (isLeaving) {
key = ancestors.length === 0 ? undefined : path[path.length - 1];
node = parent;
parent = ancestors.pop();
if (isEdited) {
if (inArray) {
node = node.slice();
} else {
var clone = {};
for (var _i2 = 0, _Object$keys2 = Object.keys(node); _i2 < _Object$keys2.length; _i2++) {
var k = _Object$keys2[_i2];
clone[k] = node[k];
}
node = clone;
}
var editOffset = 0;
for (var ii = 0; ii < edits.length; ii++) {
var editKey = edits[ii][0];
var editValue = edits[ii][1];
if (inArray) {
editKey -= editOffset;
}
if (inArray && editValue === null) {
node.splice(editKey, 1);
editOffset++;
} else {
node[editKey] = editValue;
}
}
}
index = stack.index;
keys = stack.keys;
edits = stack.edits;
inArray = stack.inArray;
stack = stack.prev;
} else {
key = parent ? inArray ? index : keys[index] : undefined;
node = parent ? parent[key] : newRoot;
if (node === null || node === undefined) {
continue;
}
if (parent) {
path.push(key);
}
}
var result = void 0;
if (!Array.isArray(node)) {
if (!(0, _ast.isNode)(node)) {
throw new Error("Invalid AST Node: ".concat((0, _inspect.default)(node), "."));
}
var visitFn = getVisitFn(visitor, node.kind, isLeaving);
if (visitFn) {
result = visitFn.call(visitor, node, key, parent, path, ancestors);
if (result === BREAK) {
break;
}
if (result === false) {
if (!isLeaving) {
path.pop();
continue;
}
} else if (result !== undefined) {
edits.push([key, result]);
if (!isLeaving) {
if ((0, _ast.isNode)(result)) {
node = result;
} else {
path.pop();
continue;
}
}
}
}
}
if (result === undefined && isEdited) {
edits.push([key, node]);
}
if (isLeaving) {
path.pop();
} else {
var _visitorKeys$node$kin;
stack = {
inArray: inArray,
index: index,
keys: keys,
edits: edits,
prev: stack
};
inArray = Array.isArray(node);
keys = inArray ? node : (_visitorKeys$node$kin = visitorKeys[node.kind]) !== null && _visitorKeys$node$kin !== void 0 ? _visitorKeys$node$kin : [];
index = -1;
edits = [];
if (parent) {
ancestors.push(parent);
}
parent = node;
}
} while (stack !== undefined);
if (edits.length !== 0) {
newRoot = edits[edits.length - 1][1];
}
return newRoot;
}
/**
* Creates a new visitor instance which delegates to many visitors to run in
 * parallel. Each visitor will be invoked for each node before moving on.
*
* If a prior visitor edits a node, no following visitors will see that node.
*/
function visitInParallel(visitors) {
var skipping = new Array(visitors.length);
return {
enter: function enter(node) {
for (var i = 0; i < visitors.length; i++) {
if (skipping[i] == null) {
var fn = getVisitFn(visitors[i], node.kind,
/* isLeaving */
false);
if (fn) {
var result = fn.apply(visitors[i], arguments);
if (result === false) {
skipping[i] = node;
} else if (result === BREAK) {
skipping[i] = BREAK;
} else if (result !== undefined) {
return result;
}
}
}
}
},
leave: function leave(node) {
for (var i = 0; i < visitors.length; i++) {
if (skipping[i] == null) {
var fn = getVisitFn(visitors[i], node.kind,
/* isLeaving */
true);
if (fn) {
var result = fn.apply(visitors[i], arguments);
if (result === BREAK) {
skipping[i] = BREAK;
} else if (result !== undefined && result !== false) {
return result;
}
}
} else if (skipping[i] === node) {
skipping[i] = null;
}
}
}
};
}
/**
* Given a visitor instance, if it is leaving or not, and a node kind, return
* the function the visitor runtime should call.
*/
function getVisitFn(visitor, kind, isLeaving) {
var kindVisitor = visitor[kind];
if (kindVisitor) {
if (!isLeaving && typeof kindVisitor === 'function') {
// { Kind() {} }
return kindVisitor;
}
var kindSpecificVisitor = isLeaving ? kindVisitor.leave : kindVisitor.enter;
if (typeof kindSpecificVisitor === 'function') {
// { Kind: { enter() {}, leave() {} } }
return kindSpecificVisitor;
}
} else {
var specificVisitor = isLeaving ? visitor.leave : visitor.enter;
if (specificVisitor) {
if (typeof specificVisitor === 'function') {
// { enter() {}, leave() {} }
return specificVisitor;
}
var specificKindVisitor = specificVisitor[kind];
if (typeof specificKindVisitor === 'function') {
// { enter: { Kind() {} }, leave: { Kind() {} } }
return specificKindVisitor;
}
}
}
}
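// Illustrative sketch (added for this write-up, not part of the original file):
// getVisitFn resolves which function to call for a node kind, covering both the
// { Kind: { enter, leave } } shape and the generic { enter, leave } shape.
var shapeSketch = { Field: { enter: function () {}, leave: function () {} } };
console.log(getVisitFn(shapeSketch, 'Field', true) === shapeSketch.Field.leave); // true
console.log(getVisitFn(shapeSketch, 'Argument', false)); // undefined (no match)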

View File

@@ -0,0 +1,437 @@
// @flow strict
import inspect from '../jsutils/inspect';
import type { ASTNode, ASTKindToNode } from './ast';
import { isNode } from './ast';
/**
 * A visitor is provided to visit; it contains the collection of
* relevant functions to be called during the visitor's traversal.
*/
export type ASTVisitor = Visitor<ASTKindToNode>;
export type Visitor<KindToNode, Nodes = $Values<KindToNode>> =
| EnterLeave<
| VisitFn<Nodes>
| ShapeMap<KindToNode, <Node>(Node) => VisitFn<Nodes, Node>>,
>
| ShapeMap<
KindToNode,
<Node>(Node) => VisitFn<Nodes, Node> | EnterLeave<VisitFn<Nodes, Node>>,
>;
type EnterLeave<T> = {| +enter?: T, +leave?: T |};
type ShapeMap<O, F> = $Shape<$ObjMap<O, F>>;
/**
 * A visitor is composed of visit functions, which are called on each node
* during the visitor's traversal.
*/
export type VisitFn<TAnyNode, TVisitedNode: TAnyNode = TAnyNode> = (
  // The current node being visited.
node: TVisitedNode,
// The index or key to this node from the parent node or Array.
key: string | number | void,
// The parent immediately above this node, which may be an Array.
parent: TAnyNode | $ReadOnlyArray<TAnyNode> | void,
// The key path to get to this node from the root node.
path: $ReadOnlyArray<string | number>,
// All nodes and Arrays visited before reaching parent of this node.
// These correspond to array indices in `path`.
  // Note: ancestors includes arrays which contain the parent of the visited node.
ancestors: $ReadOnlyArray<TAnyNode | $ReadOnlyArray<TAnyNode>>,
) => any;
/**
 * A KeyMap describes the traversable properties of each kind of node.
*/
export type VisitorKeyMap<KindToNode> = $ObjMap<
KindToNode,
<T>(T) => $ReadOnlyArray<$Keys<T>>,
>;
export const QueryDocumentKeys: VisitorKeyMap<ASTKindToNode> = {
Name: [],
Document: ['definitions'],
OperationDefinition: [
'name',
'variableDefinitions',
'directives',
'selectionSet',
],
VariableDefinition: ['variable', 'type', 'defaultValue', 'directives'],
Variable: ['name'],
SelectionSet: ['selections'],
Field: ['alias', 'name', 'arguments', 'directives', 'selectionSet'],
Argument: ['name', 'value'],
FragmentSpread: ['name', 'directives'],
InlineFragment: ['typeCondition', 'directives', 'selectionSet'],
FragmentDefinition: [
'name',
// Note: fragment variable definitions are experimental and may be changed
// or removed in the future.
'variableDefinitions',
'typeCondition',
'directives',
'selectionSet',
],
IntValue: [],
FloatValue: [],
StringValue: [],
BooleanValue: [],
NullValue: [],
EnumValue: [],
ListValue: ['values'],
ObjectValue: ['fields'],
ObjectField: ['name', 'value'],
Directive: ['name', 'arguments'],
NamedType: ['name'],
ListType: ['type'],
NonNullType: ['type'],
SchemaDefinition: ['description', 'directives', 'operationTypes'],
OperationTypeDefinition: ['type'],
ScalarTypeDefinition: ['description', 'name', 'directives'],
ObjectTypeDefinition: [
'description',
'name',
'interfaces',
'directives',
'fields',
],
FieldDefinition: ['description', 'name', 'arguments', 'type', 'directives'],
InputValueDefinition: [
'description',
'name',
'type',
'defaultValue',
'directives',
],
InterfaceTypeDefinition: [
'description',
'name',
'interfaces',
'directives',
'fields',
],
UnionTypeDefinition: ['description', 'name', 'directives', 'types'],
EnumTypeDefinition: ['description', 'name', 'directives', 'values'],
EnumValueDefinition: ['description', 'name', 'directives'],
InputObjectTypeDefinition: ['description', 'name', 'directives', 'fields'],
DirectiveDefinition: ['description', 'name', 'arguments', 'locations'],
SchemaExtension: ['directives', 'operationTypes'],
ScalarTypeExtension: ['name', 'directives'],
ObjectTypeExtension: ['name', 'interfaces', 'directives', 'fields'],
InterfaceTypeExtension: ['name', 'interfaces', 'directives', 'fields'],
UnionTypeExtension: ['name', 'directives', 'types'],
EnumTypeExtension: ['name', 'directives', 'values'],
InputObjectTypeExtension: ['name', 'directives', 'fields'],
};
export const BREAK: { ... } = Object.freeze({});
/**
* visit() will walk through an AST using a depth-first traversal, calling
* the visitor's enter function at each node in the traversal, and calling the
* leave function after visiting that node and all of its child nodes.
*
* By returning different values from the enter and leave functions, the
* behavior of the visitor can be altered, including skipping over a sub-tree of
* the AST (by returning false), editing the AST by returning a value or null
* to remove the value, or to stop the whole traversal by returning BREAK.
*
* When using visit() to edit an AST, the original AST will not be modified, and
* a new version of the AST with the changes applied will be returned from the
* visit function.
*
* const editedAST = visit(ast, {
* enter(node, key, parent, path, ancestors) {
* // @return
* // undefined: no action
* // false: skip visiting this node
* // visitor.BREAK: stop visiting altogether
* // null: delete this node
* // any value: replace this node with the returned value
* },
* leave(node, key, parent, path, ancestors) {
* // @return
* // undefined: no action
* // false: no action
* // visitor.BREAK: stop visiting altogether
* // null: delete this node
* // any value: replace this node with the returned value
* }
* });
*
 * As an alternative to providing enter() and leave() functions, a visitor can
* instead provide functions named the same as the kinds of AST nodes, or
* enter/leave visitors at a named key, leading to four permutations of the
* visitor API:
*
* 1) Named visitors triggered when entering a node of a specific kind.
*
* visit(ast, {
* Kind(node) {
* // enter the "Kind" node
* }
* })
*
* 2) Named visitors that trigger upon entering and leaving a node of
* a specific kind.
*
* visit(ast, {
* Kind: {
* enter(node) {
* // enter the "Kind" node
* }
* leave(node) {
* // leave the "Kind" node
* }
* }
* })
*
* 3) Generic visitors that trigger upon entering and leaving any node.
*
* visit(ast, {
* enter(node) {
* // enter any node
* },
* leave(node) {
* // leave any node
* }
* })
*
* 4) Parallel visitors for entering and leaving nodes of a specific kind.
*
* visit(ast, {
* enter: {
* Kind(node) {
* // enter the "Kind" node
* }
* },
* leave: {
* Kind(node) {
* // leave the "Kind" node
* }
* }
* })
*/
export function visit(
root: ASTNode,
visitor: Visitor<ASTKindToNode>,
visitorKeys: VisitorKeyMap<ASTKindToNode> = QueryDocumentKeys,
): any {
/* eslint-disable no-undef-init */
let stack: any = undefined;
let inArray = Array.isArray(root);
let keys: any = [root];
let index = -1;
let edits = [];
let node: any = undefined;
let key: any = undefined;
let parent: any = undefined;
const path: any = [];
const ancestors = [];
let newRoot = root;
/* eslint-enable no-undef-init */
do {
index++;
const isLeaving = index === keys.length;
const isEdited = isLeaving && edits.length !== 0;
if (isLeaving) {
key = ancestors.length === 0 ? undefined : path[path.length - 1];
node = parent;
parent = ancestors.pop();
if (isEdited) {
if (inArray) {
node = node.slice();
} else {
const clone = {};
for (const k of Object.keys(node)) {
clone[k] = node[k];
}
node = clone;
}
let editOffset = 0;
for (let ii = 0; ii < edits.length; ii++) {
let editKey: any = edits[ii][0];
const editValue = edits[ii][1];
if (inArray) {
editKey -= editOffset;
}
if (inArray && editValue === null) {
node.splice(editKey, 1);
editOffset++;
} else {
node[editKey] = editValue;
}
}
}
index = stack.index;
keys = stack.keys;
edits = stack.edits;
inArray = stack.inArray;
stack = stack.prev;
} else {
key = parent ? (inArray ? index : keys[index]) : undefined;
node = parent ? parent[key] : newRoot;
if (node === null || node === undefined) {
continue;
}
if (parent) {
path.push(key);
}
}
let result;
if (!Array.isArray(node)) {
if (!isNode(node)) {
throw new Error(`Invalid AST Node: ${inspect(node)}.`);
}
const visitFn = getVisitFn(visitor, node.kind, isLeaving);
if (visitFn) {
result = visitFn.call(visitor, node, key, parent, path, ancestors);
if (result === BREAK) {
break;
}
if (result === false) {
if (!isLeaving) {
path.pop();
continue;
}
} else if (result !== undefined) {
edits.push([key, result]);
if (!isLeaving) {
if (isNode(result)) {
node = result;
} else {
path.pop();
continue;
}
}
}
}
}
if (result === undefined && isEdited) {
edits.push([key, node]);
}
if (isLeaving) {
path.pop();
} else {
stack = { inArray, index, keys, edits, prev: stack };
inArray = Array.isArray(node);
keys = inArray ? node : visitorKeys[node.kind] ?? [];
index = -1;
edits = [];
if (parent) {
ancestors.push(parent);
}
parent = node;
}
} while (stack !== undefined);
if (edits.length !== 0) {
newRoot = edits[edits.length - 1][1];
}
return newRoot;
}
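// Illustrative usage sketch (added for this write-up, not part of the original
// file). It assumes a `parse` function from the sibling parser module; returning
// null from a visit function deletes that node, and visit() returns an edited
// copy while leaving the original AST untouched.
// import { parse } from './parser';
// const queryAST = parse('{ user(id: 4) { name secret } }');
// const sanitizedAST = visit(queryAST, {
//   Field(node) {
//     return node.name.value === 'secret' ? null : undefined;
//   },
// });
// // sanitizedAST prints as '{ user(id: 4) { name } }'; queryAST is unchanged.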
/**
* Creates a new visitor instance which delegates to many visitors to run in
 * parallel. Each visitor will be invoked for each node before moving on.
*
* If a prior visitor edits a node, no following visitors will see that node.
*/
export function visitInParallel(
visitors: $ReadOnlyArray<Visitor<ASTKindToNode>>,
): Visitor<ASTKindToNode> {
const skipping = new Array(visitors.length);
return {
enter(node) {
for (let i = 0; i < visitors.length; i++) {
if (skipping[i] == null) {
const fn = getVisitFn(visitors[i], node.kind, /* isLeaving */ false);
if (fn) {
const result = fn.apply(visitors[i], arguments);
if (result === false) {
skipping[i] = node;
} else if (result === BREAK) {
skipping[i] = BREAK;
} else if (result !== undefined) {
return result;
}
}
}
}
},
leave(node) {
for (let i = 0; i < visitors.length; i++) {
if (skipping[i] == null) {
const fn = getVisitFn(visitors[i], node.kind, /* isLeaving */ true);
if (fn) {
const result = fn.apply(visitors[i], arguments);
if (result === BREAK) {
skipping[i] = BREAK;
} else if (result !== undefined && result !== false) {
return result;
}
}
} else if (skipping[i] === node) {
skipping[i] = null;
}
}
},
};
}
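// Illustrative usage sketch, not part of the original module: two independent
// visitors merged into a single traversal. The name-collecting visitors below
// are hypothetical examples, not library code.
export function exampleCollectNames(documentAST: ASTNode) {
  const fieldNames: Array<string> = [];
  const fragmentSpreads: Array<string> = [];
  visit(
    documentAST,
    visitInParallel([
      {
        Field(node) {
          fieldNames.push(node.name.value);
        },
      },
      {
        FragmentSpread(node) {
          fragmentSpreads.push(node.name.value);
        },
      },
    ]),
  );
  return { fieldNames, fragmentSpreads };
}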
/**
* Given a visitor instance, if it is leaving or not, and a node kind, return
* the function the visitor runtime should call.
*/
export function getVisitFn(
visitor: Visitor<any>,
kind: string,
isLeaving: boolean,
): ?VisitFn<any> {
const kindVisitor = visitor[kind];
if (kindVisitor) {
if (!isLeaving && typeof kindVisitor === 'function') {
// { Kind() {} }
return kindVisitor;
}
const kindSpecificVisitor = isLeaving
? kindVisitor.leave
: kindVisitor.enter;
if (typeof kindSpecificVisitor === 'function') {
// { Kind: { enter() {}, leave() {} } }
return kindSpecificVisitor;
}
} else {
const specificVisitor = isLeaving ? visitor.leave : visitor.enter;
if (specificVisitor) {
if (typeof specificVisitor === 'function') {
// { enter() {}, leave() {} }
return specificVisitor;
}
const specificKindVisitor = specificVisitor[kind];
if (typeof specificKindVisitor === 'function') {
// { enter: { Kind() {} }, leave: { Kind() {} } }
return specificKindVisitor;
}
}
}
}
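// Illustrative usage sketch, not part of the original module: how one of the
// visitor shapes documented above resolves through getVisitFn. The
// `fieldVisitor` object and its no-op callbacks are hypothetical.
export function exampleVisitFnLookup() {
  const fieldVisitor = {
    Field: {
      enter() {
        /* called when entering a Field node */
      },
      leave() {
        /* called when leaving a Field node */
      },
    },
  };
  // Shape (2), { Kind: { enter, leave } }: resolves to the kind-specific handlers.
  const enterFn = getVisitFn(fieldVisitor, 'Field', /* isLeaving */ false);
  const leaveFn = getVisitFn(fieldVisitor, 'Field', /* isLeaving */ true);
  return { enterFn, leaveFn };
}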
View File
@@ -0,0 +1,383 @@
import inspect from "../jsutils/inspect.mjs";
import { isNode } from "./ast.mjs";
/**
 * A visitor is provided to visit; it contains the collection of
* relevant functions to be called during the visitor's traversal.
*/
export var QueryDocumentKeys = {
Name: [],
Document: ['definitions'],
OperationDefinition: ['name', 'variableDefinitions', 'directives', 'selectionSet'],
VariableDefinition: ['variable', 'type', 'defaultValue', 'directives'],
Variable: ['name'],
SelectionSet: ['selections'],
Field: ['alias', 'name', 'arguments', 'directives', 'selectionSet'],
Argument: ['name', 'value'],
FragmentSpread: ['name', 'directives'],
InlineFragment: ['typeCondition', 'directives', 'selectionSet'],
FragmentDefinition: [
  'name',
  // Note: fragment variable definitions are experimental and may be changed
  // or removed in the future.
  'variableDefinitions', 'typeCondition', 'directives', 'selectionSet'
],
IntValue: [],
FloatValue: [],
StringValue: [],
BooleanValue: [],
NullValue: [],
EnumValue: [],
ListValue: ['values'],
ObjectValue: ['fields'],
ObjectField: ['name', 'value'],
Directive: ['name', 'arguments'],
NamedType: ['name'],
ListType: ['type'],
NonNullType: ['type'],
SchemaDefinition: ['description', 'directives', 'operationTypes'],
OperationTypeDefinition: ['type'],
ScalarTypeDefinition: ['description', 'name', 'directives'],
ObjectTypeDefinition: ['description', 'name', 'interfaces', 'directives', 'fields'],
FieldDefinition: ['description', 'name', 'arguments', 'type', 'directives'],
InputValueDefinition: ['description', 'name', 'type', 'defaultValue', 'directives'],
InterfaceTypeDefinition: ['description', 'name', 'interfaces', 'directives', 'fields'],
UnionTypeDefinition: ['description', 'name', 'directives', 'types'],
EnumTypeDefinition: ['description', 'name', 'directives', 'values'],
EnumValueDefinition: ['description', 'name', 'directives'],
InputObjectTypeDefinition: ['description', 'name', 'directives', 'fields'],
DirectiveDefinition: ['description', 'name', 'arguments', 'locations'],
SchemaExtension: ['directives', 'operationTypes'],
ScalarTypeExtension: ['name', 'directives'],
ObjectTypeExtension: ['name', 'interfaces', 'directives', 'fields'],
InterfaceTypeExtension: ['name', 'interfaces', 'directives', 'fields'],
UnionTypeExtension: ['name', 'directives', 'types'],
EnumTypeExtension: ['name', 'directives', 'values'],
InputObjectTypeExtension: ['name', 'directives', 'fields']
};
export var BREAK = Object.freeze({});
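// Illustrative usage sketch, not part of the compiled output: visit() accepts
// a custom key map as its third argument, so traversal can be restricted to a
// subset of each node's children, and BREAK stops the walk early. The
// findFirstField helper and the restricted key map below are hypothetical.
export function findFirstField(documentAST) {
  var found = null;
  // Only descend through keys that can lead to a Field node; kinds missing
  // from the map default to having no visitable children.
  var selectionOnlyKeys = {
    Document: ['definitions'],
    OperationDefinition: ['selectionSet'],
    FragmentDefinition: ['selectionSet'],
    SelectionSet: ['selections'],
    Field: ['selectionSet'],
    InlineFragment: ['selectionSet']
  };
  visit(documentAST, {
    Field: function Field(node) {
      found = node;
      return BREAK; // stop the whole traversal at the first field encountered
    }
  }, selectionOnlyKeys);
  return found;
}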
/**
* visit() will walk through an AST using a depth-first traversal, calling
* the visitor's enter function at each node in the traversal, and calling the
* leave function after visiting that node and all of its child nodes.
*
* By returning different values from the enter and leave functions, the
* behavior of the visitor can be altered, including skipping over a sub-tree of
* the AST (by returning false), editing the AST by returning a value or null
 * to remove the node, or stopping the whole traversal by returning BREAK.
*
* When using visit() to edit an AST, the original AST will not be modified, and
* a new version of the AST with the changes applied will be returned from the
* visit function.
*
* const editedAST = visit(ast, {
* enter(node, key, parent, path, ancestors) {
* // @return
* // undefined: no action
* // false: skip visiting this node
* // visitor.BREAK: stop visiting altogether
* // null: delete this node
* // any value: replace this node with the returned value
* },
* leave(node, key, parent, path, ancestors) {
* // @return
* // undefined: no action
* // false: no action
* // visitor.BREAK: stop visiting altogether
* // null: delete this node
* // any value: replace this node with the returned value
* }
* });
*
 * As an alternative to providing enter() and leave() functions, a visitor can
* instead provide functions named the same as the kinds of AST nodes, or
* enter/leave visitors at a named key, leading to four permutations of the
* visitor API:
*
* 1) Named visitors triggered when entering a node of a specific kind.
*
* visit(ast, {
* Kind(node) {
* // enter the "Kind" node
* }
* })
*
* 2) Named visitors that trigger upon entering and leaving a node of
* a specific kind.
*
* visit(ast, {
* Kind: {
* enter(node) {
* // enter the "Kind" node
 *         },
* leave(node) {
* // leave the "Kind" node
* }
* }
* })
*
* 3) Generic visitors that trigger upon entering and leaving any node.
*
* visit(ast, {
* enter(node) {
* // enter any node
* },
* leave(node) {
* // leave any node
* }
* })
*
* 4) Parallel visitors for entering and leaving nodes of a specific kind.
*
* visit(ast, {
* enter: {
* Kind(node) {
* // enter the "Kind" node
* }
* },
* leave: {
* Kind(node) {
* // leave the "Kind" node
* }
* }
* })
*/
export function visit(root, visitor) {
var visitorKeys = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : QueryDocumentKeys;
/* eslint-disable no-undef-init */
var stack = undefined;
var inArray = Array.isArray(root);
var keys = [root];
var index = -1;
var edits = [];
var node = undefined;
var key = undefined;
var parent = undefined;
var path = [];
var ancestors = [];
var newRoot = root;
/* eslint-enable no-undef-init */
do {
index++;
var isLeaving = index === keys.length;
var isEdited = isLeaving && edits.length !== 0;
if (isLeaving) {
key = ancestors.length === 0 ? undefined : path[path.length - 1];
node = parent;
parent = ancestors.pop();
if (isEdited) {
if (inArray) {
node = node.slice();
} else {
var clone = {};
for (var _i2 = 0, _Object$keys2 = Object.keys(node); _i2 < _Object$keys2.length; _i2++) {
var k = _Object$keys2[_i2];
clone[k] = node[k];
}
node = clone;
}
var editOffset = 0;
for (var ii = 0; ii < edits.length; ii++) {
var editKey = edits[ii][0];
var editValue = edits[ii][1];
if (inArray) {
editKey -= editOffset;
}
if (inArray && editValue === null) {
node.splice(editKey, 1);
editOffset++;
} else {
node[editKey] = editValue;
}
}
}
index = stack.index;
keys = stack.keys;
edits = stack.edits;
inArray = stack.inArray;
stack = stack.prev;
} else {
key = parent ? inArray ? index : keys[index] : undefined;
node = parent ? parent[key] : newRoot;
if (node === null || node === undefined) {
continue;
}
if (parent) {
path.push(key);
}
}
var result = void 0;
if (!Array.isArray(node)) {
if (!isNode(node)) {
throw new Error("Invalid AST Node: ".concat(inspect(node), "."));
}
var visitFn = getVisitFn(visitor, node.kind, isLeaving);
if (visitFn) {
result = visitFn.call(visitor, node, key, parent, path, ancestors);
if (result === BREAK) {
break;
}
if (result === false) {
if (!isLeaving) {
path.pop();
continue;
}
} else if (result !== undefined) {
edits.push([key, result]);
if (!isLeaving) {
if (isNode(result)) {
node = result;
} else {
path.pop();
continue;
}
}
}
}
}
if (result === undefined && isEdited) {
edits.push([key, node]);
}
if (isLeaving) {
path.pop();
} else {
var _visitorKeys$node$kin;
stack = {
inArray: inArray,
index: index,
keys: keys,
edits: edits,
prev: stack
};
inArray = Array.isArray(node);
keys = inArray ? node : (_visitorKeys$node$kin = visitorKeys[node.kind]) !== null && _visitorKeys$node$kin !== void 0 ? _visitorKeys$node$kin : [];
index = -1;
edits = [];
if (parent) {
ancestors.push(parent);
}
parent = node;
}
} while (stack !== undefined);
if (edits.length !== 0) {
newRoot = edits[edits.length - 1][1];
}
return newRoot;
}
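// Illustrative usage sketch, not part of the compiled output: returning null
// from a visitor deletes that node from the edited copy returned by visit(),
// while the original AST stays unchanged. The helper name and the targeted
// directive name ('client') are hypothetical.
export function exampleStripClientDirectives(documentAST) {
  return visit(documentAST, {
    Directive: function Directive(node) {
      if (node.name.value === 'client') {
        return null; // removes this directive from the edited copy
      }
    }
  });
}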
/**
* Creates a new visitor instance which delegates to many visitors to run in
* parallel. Each visitor will be visited for each node before moving on.
*
* If a prior visitor edits a node, no following visitors will see that node.
*/
export function visitInParallel(visitors) {
var skipping = new Array(visitors.length);
return {
enter: function enter(node) {
for (var i = 0; i < visitors.length; i++) {
if (skipping[i] == null) {
          var fn = getVisitFn(visitors[i], node.kind, /* isLeaving */ false);
if (fn) {
var result = fn.apply(visitors[i], arguments);
if (result === false) {
skipping[i] = node;
} else if (result === BREAK) {
skipping[i] = BREAK;
} else if (result !== undefined) {
return result;
}
}
}
}
},
leave: function leave(node) {
for (var i = 0; i < visitors.length; i++) {
if (skipping[i] == null) {
          var fn = getVisitFn(visitors[i], node.kind, /* isLeaving */ true);
if (fn) {
var result = fn.apply(visitors[i], arguments);
if (result === BREAK) {
skipping[i] = BREAK;
} else if (result !== undefined && result !== false) {
return result;
}
}
} else if (skipping[i] === node) {
skipping[i] = null;
}
}
}
};
}
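// Illustrative usage sketch, not part of the compiled output: when visitors
// run in parallel, a `false` return only suppresses the returning visitor for
// that subtree; the other visitors keep receiving those nodes. The two inline
// counting visitors below are hypothetical.
export function exampleParallelSkip(documentAST) {
  var outerFields = 0;
  var allFields = 0;
  visit(documentAST, visitInParallel([{
    Field: function Field(node) {
      outerFields++;
      if (node.selectionSet) {
        return false; // only this visitor skips the nested selection set
      }
    }
  }, {
    Field: function Field() {
      allFields++; // still called for every field, including skipped subtrees
    }
  }]));
  return { outerFields: outerFields, allFields: allFields };
}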
/**
* Given a visitor instance, if it is leaving or not, and a node kind, return
* the function the visitor runtime should call.
*/
export function getVisitFn(visitor, kind, isLeaving) {
var kindVisitor = visitor[kind];
if (kindVisitor) {
if (!isLeaving && typeof kindVisitor === 'function') {
// { Kind() {} }
return kindVisitor;
}
var kindSpecificVisitor = isLeaving ? kindVisitor.leave : kindVisitor.enter;
if (typeof kindSpecificVisitor === 'function') {
// { Kind: { enter() {}, leave() {} } }
return kindSpecificVisitor;
}
} else {
var specificVisitor = isLeaving ? visitor.leave : visitor.enter;
if (specificVisitor) {
if (typeof specificVisitor === 'function') {
// { enter() {}, leave() {} }
return specificVisitor;
}
var specificKindVisitor = specificVisitor[kind];
if (typeof specificKindVisitor === 'function') {
// { enter: { Kind() {} }, leave: { Kind() {} } }
return specificKindVisitor;
}
}
}
}